5 changes: 3 additions & 2 deletions .pylintrc
@@ -32,13 +32,14 @@ callbacks=cb_,_cb
bad-functions=map,filter

# Good variable names which should always be accepted, separated by a comma
# x, y, X, Y - vector maxtrix of features and labels.
# x, y, X, Y, Z - vector/matrix of features and labels.
# P, p - probability distribution
# qs - query strategy
# w, W - weight vector
# N - number of instances
# T, K - number of iterations
good-names=i,j,k,_,X,Y,x,y,P,p,qs,w,W,N,T,K
# up, down - numerator and denominator of the fraction
good-names=i,j,k,_,X,Y,Z,x,y,P,p,qs,w,W,N,T,K,up,down

# Regular expression matching correct attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
22 changes: 22 additions & 0 deletions docs/.readthedocs.yaml
@@ -0,0 +1,22 @@
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

# Set the version of Python and other tools you might need
build:
  os: ubuntu-22.04
  tools:
    python: "3.11"

# Build documentation in the docs/ directory with Sphinx
sphinx:
  configuration: docs/conf.py

# We recommend specifying your dependencies to enable reproducible builds:
# https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html
python:
  install:
    - requirements: docs/rtd-requirements.txt
10 changes: 0 additions & 10 deletions libact/base/interfaces.py
@@ -36,7 +36,6 @@ def update(self, entry_id, label):
label : float
The label of the queried sample.
"""
pass

def _get_scores(self):
"""Return the score used for making query, the larger the better. Read-only.
@@ -48,7 +47,6 @@ def _get_scores(self):
(ask_id, scores): list of tuple (int, float)
The index of the next unlabeled sample to be queried and the score assigned.
"""
pass

@abstractmethod
def make_query(self):
@@ -61,7 +59,6 @@ def make_query(self):
ask_id : int
The index of the next unlabeled sample to be queried and labeled.
"""
pass


class Labeler(with_metaclass(ABCMeta, object)):
@@ -84,7 +81,6 @@ def label(self, feature):
label : int
The class label of the queried feature.
"""
pass


class Model(with_metaclass(ABCMeta, object)):
@@ -108,7 +104,6 @@ def train(self, dataset, *args, **kwargs):
self : object
Returns self.
"""
pass

@abstractmethod
def predict(self, feature, *args, **kwargs):
@@ -124,7 +119,6 @@ def predict(self, feature, *args, **kwargs):
y_pred : array-like, shape (n_samples,)
The class labels for samples in the feature array.
"""
pass

@abstractmethod
def score(self, testing_dataset, *args, **kwargs):
@@ -141,7 +135,6 @@ def score(self, testing_dataset, *args, **kwargs):
score : float
Mean accuracy of self.predict(X) wrt. y.
"""
pass


class MultilabelModel(Model):
@@ -150,7 +143,6 @@ class MultilabelModel(Model):
A Model returns a multilabel-predicting function for future samples after
being trained on a training dataset.
"""
pass


class ContinuousModel(Model):
@@ -183,7 +175,6 @@ def predict_real(self, feature, *args, **kwargs):
Each entry is the confidence scores per (sample, class)
combination.
"""
pass


class ProbabilisticModel(ContinuousModel):
@@ -210,4 +201,3 @@ def predict_proba(self, feature, *args, **kwargs):
X : array-like, shape (n_samples, n_classes)
Each entry is the prabablity estimate for each class.
"""
pass
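
The deleted `pass` statements were redundant: a `def` whose body is only a docstring is already syntactically complete, which is exactly the shape an abstract method should have. A minimal sketch of the pattern (class and method names are illustrative, not libact's):

```python
from abc import ABCMeta, abstractmethod


class QueryStrategyLike(metaclass=ABCMeta):
    """Illustrative abstract base class, not part of libact."""

    @abstractmethod
    def make_query(self):
        """Return the index of the next unlabeled sample to query.

        The docstring alone is a valid function body, so no trailing
        ``pass`` is needed; concrete subclasses must still override this
        method because of the ``abstractmethod`` decorator.
        """
```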
2 changes: 1 addition & 1 deletion libact/base/tests/test_dataset.py
@@ -75,7 +75,7 @@ def test_get_labeled_entries(self):

def test_get_unlabeled_entries(self):
dataset = self.setup_dataset()
idx, X = dataset.get_unlabeled_entries()
_, X = dataset.get_unlabeled_entries()
self.assertTrue(np.array_equal(X[0], np.array([6, 7, 8])))
self.assertTrue(np.array_equal(X[1], np.array([12, 13, 14])))

4 changes: 2 additions & 2 deletions libact/labelers/__init__.py
@@ -5,6 +5,6 @@
from .ideal_labeler import IdealLabeler
try:
from .interactive_labeler import InteractiveLabeler
except ImportError:
except ImportError as import_error:
raise ImportError("Error importing matplotlib."
"InteractiveLabeler not supported.")
"InteractiveLabeler not supported.") from import_error
11 changes: 6 additions & 5 deletions libact/labelers/ideal_labeler.py
@@ -20,7 +20,8 @@ class IdealLabeler(Labeler):

"""

def __init__(self, dataset, **kwargs):
def __init__(self, dataset):
super().__init__()
X, y = dataset.get_entries()
# make sure the input dataset is fully labeled
assert (np.array(y) != np.array(None)).all()
@@ -29,7 +30,7 @@ def __init__(self, dataset, **kwargs):

@inherit_docstring_from(Labeler)
def label(self, feature):
yy = self.y[np.where([np.array_equal(x, feature)
for x in self.X])[0]]
ind = np.arange(len(yy))
return yy[np.random.choice(ind, 1)[0]]
labels = self.y[np.where([np.array_equal(x, feature)
for x in self.X])[0]]
ind = np.arange(len(labels))
return labels[np.random.choice(ind, 1)[0]]
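
A usage sketch of the refactored labeler; the `Dataset` import path follows libact's public API, and the toy data is illustrative:

```python
import numpy as np

from libact.base.dataset import Dataset
from libact.labelers import IdealLabeler

# IdealLabeler requires a fully labeled dataset: the assert in __init__
# rejects any entry whose label is None.
X = np.array([[0., 1.], [2., 3.], [4., 5.]])
y = [0, 1, 0]
oracle = IdealLabeler(Dataset(X, y))

# label() looks up the stored ground truth for a matching feature vector.
print(oracle.label(np.array([2., 3.])))  # -> 1
```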
1 change: 1 addition & 0 deletions libact/labelers/interactive_labeler.py
@@ -26,6 +26,7 @@ class InteractiveLabeler(Labeler):
"""

def __init__(self, **kwargs):
super().__init__()
self.label_name = kwargs.pop('label_name', None)

@inherit_docstring_from(Labeler)
2 changes: 1 addition & 1 deletion libact/models/logistic_regression.py
@@ -18,6 +18,7 @@ class LogisticRegression(ProbabilisticModel):
"""

def __init__(self, *args, **kwargs):
super().__init__()
self.model = sklearn.linear_model.LogisticRegression(*args, **kwargs)

def train(self, dataset, *args, **kwargs):
@@ -38,4 +39,3 @@ def predict_real(self, feature, *args, **kwargs):

def predict_proba(self, feature, *args, **kwargs):
return self.model.predict_proba(feature, *args, **kwargs)

1 change: 1 addition & 0 deletions libact/models/perceptron.py
@@ -16,6 +16,7 @@ class Perceptron(Model):
"""

def __init__(self, *args, **kwargs):
super().__init__()
self.model = sklearn.linear_model.Perceptron(*args, **kwargs)

def train(self, dataset, *args, **kwargs):
8 changes: 7 additions & 1 deletion libact/models/sklearn_adapter.py
@@ -1,7 +1,7 @@
"""scikit-learn classifier adapter
"""
from sklearn.base import clone
from libact.base.interfaces import Model, ContinuousModel, ProbabilisticModel
from libact.base.interfaces import Model, ProbabilisticModel


class SklearnAdapter(Model):
@@ -37,6 +37,7 @@ class SklearnAdapter(Model):
"""

def __init__(self, clf):
super().__init__()
self._model = clf

def train(self, dataset, *args, **kwargs):
@@ -50,6 +51,8 @@ def score(self, testing_dataset, *args, **kwargs):
**kwargs)

def clone(self):
"""Constructs a new untrained model with the same parameters.
"""
return SklearnAdapter(clone(self._model))


@@ -89,6 +92,7 @@ class SklearnProbaAdapter(ProbabilisticModel):
"""

def __init__(self, clf):
super().__init__()
self._model = clf

def train(self, dataset, *args, **kwargs):
@@ -108,4 +112,6 @@ def predict_proba(self, feature, *args, **kwargs):
return self._model.predict_proba(feature, *args, **kwargs)

def clone(self):
"""Constructs a new untrained model with the same parameters.
"""
return SklearnProbaAdapter(clone(self._model))
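
A usage sketch of the probabilistic adapter; the estimator choice and toy data are illustrative, and the import paths are assumed from `libact.models`:

```python
import numpy as np
from sklearn.linear_model import LogisticRegression

from libact.base.dataset import Dataset
from libact.models import SklearnProbaAdapter

# Wrap a scikit-learn classifier so it satisfies libact's ProbabilisticModel
# interface; predict_proba / predict_real are forwarded to the estimator.
model = SklearnProbaAdapter(LogisticRegression())

X = np.array([[0., 0.], [1., 1.], [0., 1.], [1., 0.]])
y = [0, 1, 0, 1]
model.train(Dataset(X, y))      # train() takes a libact Dataset, not (X, y)
print(model.predict_proba(X))   # shape (n_samples, n_classes)

fresh = model.clone()           # untrained copy with the same hyperparameters
```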
14 changes: 8 additions & 6 deletions libact/models/svm.py
@@ -3,7 +3,6 @@
An interface for scikit-learn's C-Support Vector Classifier model.
"""
import logging
LOGGER = logging.getLogger(__name__)

import numpy as np
import sklearn.svm
@@ -12,6 +11,9 @@
from libact.base.interfaces import ContinuousModel


LOGGER = logging.getLogger(__name__)


class SVM(ContinuousModel):

"""C-Support Vector Machine Classifier
@@ -46,8 +48,8 @@ def predict_real(self, feature, *args, **kwargs):
dvalue = self.model.decision_function(feature, *args, **kwargs)
if len(np.shape(dvalue)) == 1: # n_classes == 2
return np.vstack((-dvalue, dvalue)).T
else:
if self.decision_function_shape != 'ovr':
LOGGER.warn("SVM model support only 'ovr' for multiclass"
"predict_real.")
return dvalue

if self.decision_function_shape != 'ovr':
LOGGER.warning("SVM model support only 'ovr' for multiclass"
"predict_real.")
return dvalue
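
In the binary case scikit-learn's `decision_function` returns a 1-D array of signed distances for the positive class, so the wrapper stacks `(-d, d)` to produce one column per class. A standalone sketch of that reshaping:

```python
import numpy as np

# One signed distance per sample: positive favours class 1, negative class 0.
dvalue = np.array([-1.2, 0.3, 2.0])

# Stack (-d, d) and transpose to get the (n_samples, n_classes) layout
# that predict_real promises.
per_class = np.vstack((-dvalue, dvalue)).T
# rows: [[1.2, -1.2], [-0.3, 0.3], [-2.0, 2.0]]
```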
2 changes: 2 additions & 0 deletions libact/query_strategies/__init__.py
@@ -3,6 +3,8 @@
"""
from __future__ import absolute_import

# pylint: disable=wrong-import-position

import os
ON_RTD = os.environ.get('READTHEDOCS', None) == 'True'
import logging
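
The suppression added above is needed because `ON_RTD` must be evaluated between import statements, which pylint's wrong-import-position check would otherwise flag. A standalone sketch of the guard (the skipped work is illustrative, not libact's actual layout):

```python
# pylint: disable=wrong-import-position
import os

# This assignment between imports is what wrong-import-position objects to,
# but it has to run before deciding which strategy modules to pull in.
ON_RTD = os.environ.get('READTHEDOCS', None) == 'True'

import logging

LOGGER = logging.getLogger(__name__)

if ON_RTD:
    # On a Read the Docs build, compiled extensions may be unavailable,
    # so heavyweight imports are skipped instead of failing outright.
    LOGGER.info("Read the Docs build detected; skipping optional imports.")
```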
@@ -154,7 +154,7 @@ def make_query(self):
return unlabeled_entry_ids[ask_id]

class DensityWeightedLogisticRegression(object):
"""Density Weighted Logistic Regression
r"""Density Weighted Logistic Regression

Density Weighted Logistic Regression is used in DWUS to estimate the
probability of each label for each cluster.
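
The added `r` prefix turns the docstring into a raw string so that LaTeX-style backslash sequences in the math notation reach Sphinx untouched. A minimal sketch of the difference:

```python
def plain_docstring():
    """Weight each cluster by :math:`\frac{1}{N}`."""
    # "\f" is a form-feed escape, so the stored docstring is silently
    # corrupted before Sphinx ever renders it.


def raw_docstring():
    r"""Weight each cluster by :math:`\frac{1}{N}`."""
    # The raw-string prefix keeps the backslash intact for Sphinx's
    # math role.
```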