BigDL
1c6f0e5cb844
add container
yangw
2 days ago

166 167 168 169 170 171 172 173 174 175 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190
:return: A pre-trained model. """ jmodel = callBigDlFunc(bigdl_type, "loadCaffe", model, defPath, modelPath, match_all) return Model.of(jmodel)
class Container(Model):
    '''
    A [[Container]] is a sub-class of Model that can hold other modules.

    Sub-modules are registered through :meth:`add`; both the constructor
    and ``add`` delegate directly to the backing ``value`` object.
    '''

    def __init__(self, jvalue, bigdl_type, *args):
        # Forward all constructor arguments untouched to Model.
        super(Container, self).__init__(jvalue, bigdl_type, *args)

    def add(self, model):
        # Register a sub-module on the backing object and return self
        # so that add() calls can be chained fluently.
        self.value.add(model.value)
        return self
class Linear(Model): ''' The [[Linear]] module applies a linear transformation to the input data,
...
254 255 256 257 258 259 260 261 262 263 264 269 270 271 272 273 274 275 276 277 278 279
def __init__(self, bigdl_type="float"):
    # LogSoftMax takes no layer-specific parameters: only the numeric
    # type tag is forwarded to the base-class constructor (first arg
    # None follows the file's convention for layers created by name).
    super(LogSoftMax, self).__init__(None, bigdl_type)
class Sequential(Model):
class Sequential(Container):
''' Sequential provides a means to plug layers together in a feed-forward fully connected manner.
...
272 273 274 275 276 277 278 279 280 281 282 283 284 285 287 288 289 290 291 292 293 294 295 296
''' def __init__(self, bigdl_type="float"): super(Sequential, self).__init__(None, bigdl_type)
def add(self, model):
    # Append `model`'s backing object to this container's backing
    # object and return self so that add() calls can be chained.
    self.value.add(model.value)
    return self
class SpatialConvolution(Model): '''
...
382 383 384 385 386 387 388 389 390 391 392 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 393 394 395 396 397 398 399 400 401 402 403 404 405 406 407 408 409 410 411 412 413
''' def __init__(self, dim, index, bigdl_type="float"): super(Select, self).__init__(None, bigdl_type, dim, index)
class Recurrent(Model):
class Recurrent(Container):
''' Recurrent module is a container of rnn cells Different types of rnn cells can be added using add() function >>> recurrent = Recurrent() creating: createRecurrent ''' def __init__(self, bigdl_type="float"): super(Recurrent, self).__init__(None, bigdl_type)
''' Add a recurrent kernel such as RnnCell, LSTM, GRU, etc. to be a recurrent module '''
def add(self, model):
    # Delegate registration to the backing object; return self so
    # calls can be chained.
    self.value.add(model.value)
    return self
class LSTM(Model): ''' Long Short Term Memory architecture.
...
520 521 522 523 524 525 526 527 528 529 530 523 524 525 526 527 528 529 530 531 532 533
def __init__(self, model, bigdl_type="float"):
    # Wraps another module: the wrapped `model` is forwarded to the
    # base constructor as the single layer-specific argument.
    super(TimeDistributed, self).__init__(None, bigdl_type, model)
class Concat(Model):
class Concat(Container):
''' Concat concatenates the output of one layer of "parallel" modules along the provided {@code dimension}: they take the same inputs, and their output is concatenated.
...
827 828 829 830 831 832 833 834 835 836 837 830 831 832 833 834 835 836 837 838 839 840
input_size2, output_size, bias_res)
class Bottle(Model):
class Bottle(Container):
''' Bottle allows varying dimensionality input to be forwarded through any module that accepts input of nInputDim dimensions, and generates output of nOutputDim dimensions. :param module: transform module
...
1475 1476 1477 1478 1479 1480 1481 1482 1483 1484 1485 1478 1479 1480 1481 1482 1483 1484 1485 1486 1487 1488
bigdl_type="float"): super(MV, self).__init__(None, bigdl_type, trans)
class MapTable(Model):
class MapTable(Container):
''' This class is a container for a single module which will be applied to all input elements. The member module is cloned as necessary to process all input elements.
...
1783 1784 1785 1786 1787 1788 1789 1790 1791 1792 1793 1786 1787 1788 1789 1790 1791 1792 1793 1794 1795 1796
bigdl_type="float"): super(PairwiseDistance, self).__init__(None, bigdl_type, norm)
class ParallelTable(Model):
class ParallelTable(Container):
''' It is a container module that applies the i-th member module to the i-th input, and outputs an output in the form of Table
...
1961 1962 1963 1964 1965 1966 1967 1968 1969 1970 1971 1964 1965 1966 1967 1968 1969 1970 1971 1972 1973 1974
bigdl_type="float"): super(Scale, self).__init__(None, bigdl_type, size)
class SelectTable(Model):
class SelectTable(Container):
''' Creates a module that takes a table as input and outputs the element at index `index` (positive or negative). This can be either a table or a Tensor. The gradients of the non-index elements are zeroed Tensors of the same size.
...
2523 2524 2525 2526 2527 2528 2529 2530 2531 2532 2533 2526 2527 2528 2529 2530 2531 2532 2533 2534 2535 2536
def __init__(self, size, batch_mode=None, bigdl_type="float"):
    # `size` is the target shape; `batch_mode` (default None) is passed
    # through unchanged -- presumably it toggles batch-dimension
    # handling; NOTE(review): confirm semantics against the backend.
    super(Reshape, self).__init__(None, bigdl_type, size, batch_mode)
class BiRecurrent(Model):
class BiRecurrent(Container):
''' Create a Bidirectional recurrent layer :param merge: merge layer
...
2541 2542 2543 2544 2545 2546 2547 2548 2549 2550 2551 2552 2553 2554 2555 2556 2557 2558 2559 2560 2544 2545 2546 2547 2548 2549 2550 2551 2552 2553 2554 2555
def __init__(self, merge=None, bigdl_type="float"):
    # `merge` (optional) is forwarded to the base constructor; per the
    # surrounding docstring it is the layer that merges the two
    # recurrent directions.
    super(BiRecurrent, self).__init__(None, bigdl_type, merge)
''' Add a recurrent kernel such as RnnCell, LSTM, GRU, etc. to be a recurrent module '''
def add(self, model):
    # Delegate registration to the backing object; return self so
    # calls can be chained.
    self.value.add(model.value)
    return self
class ConcatTable(Model):
class ConcatTable(Container):
''' ConcateTable is a container module like Concate. Applies an input to each member module, input can be a tensor or a table. ConcateTable usually works with CAddTable and CMulTable to
About FluentSend Feedback