Caffe2 - Python API
A deep learning, cross-platform ML framework
sparse_feature_hash.py
## @package sparse_feature_hash
# Module caffe2.python.layers.sparse_feature_hash
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

from caffe2.python import schema, core
from caffe2.python.layers.layers import (
    ModelLayer,
    IdList,
    IdScoreList,
)
from caffe2.python.layers.tags import (
    Tags
)

import numpy as np

class SparseFeatureHash(ModelLayer):
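    """Remaps the ids of a sparse feature into the range [0, modulo).

    The input record must be an IdList or IdScoreList; the output is a record
    of the same type whose metadata advertises `modulo` as the new
    categorical_limit. With use_hashing=True the remapping uses the IndexHash
    operator, otherwise a plain Mod operator is used.
    """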

    def __init__(self, model, input_record, seed=0, modulo=None,
                 use_hashing=True, name='sparse_feature_hash', **kwargs):
        super(SparseFeatureHash, self).__init__(model, name, input_record, **kwargs)

        self.seed = seed
        self.use_hashing = use_hashing
        if schema.equal_schemas(input_record, IdList):
            self.modulo = modulo or self.extract_hash_size(input_record.items.metadata)
            metadata = schema.Metadata(
                categorical_limit=self.modulo,
                feature_specs=input_record.items.metadata.feature_specs,
                expected_value=input_record.items.metadata.expected_value
            )
            with core.NameScope(name):
                self.output_schema = schema.NewRecord(model.net, IdList)
                self.output_schema.items.set_metadata(metadata)

        elif schema.equal_schemas(input_record, IdScoreList):
            self.modulo = modulo or self.extract_hash_size(input_record.keys.metadata)
            metadata = schema.Metadata(
                categorical_limit=self.modulo,
                feature_specs=input_record.keys.metadata.feature_specs,
                expected_value=input_record.keys.metadata.expected_value
            )
            with core.NameScope(name):
                self.output_schema = schema.NewRecord(model.net, IdScoreList)
                self.output_schema.keys.set_metadata(metadata)

        else:
            assert False, "Input type must be one of (IdList, IdScoreList)"

        assert self.modulo >= 1, 'Unexpected modulo: {}'.format(self.modulo)

        # Operators in this layer do not have a CUDA implementation yet.
        # In addition, since the sparse feature keys that we are hashing are
        # typically on CPU originally, it makes sense to have this layer on CPU.
        self.tags.update([Tags.CPU_ONLY])

    def extract_hash_size(self, metadata):
        if metadata.feature_specs and metadata.feature_specs.desired_hash_size:
            return metadata.feature_specs.desired_hash_size
        elif metadata.categorical_limit is not None:
            return metadata.categorical_limit
        else:
            assert False, "desired_hash_size or categorical_limit must be set"

    def add_ops(self, net):
        net.Copy(
            self.input_record.lengths(),
            self.output_schema.lengths()
        )
        if schema.equal_schemas(self.output_schema, IdList):
            input_blob = self.input_record.items()
            output_blob = self.output_schema.items()
        elif schema.equal_schemas(self.output_schema, IdScoreList):
            input_blob = self.input_record.keys()
            output_blob = self.output_schema.keys()
            net.Copy(
                self.input_record.values(),
                self.output_schema.values()
            )
        else:
            raise NotImplementedError()

        if self.use_hashing:
            net.IndexHash(
                input_blob, output_blob, seed=self.seed, modulo=self.modulo
            )
        else:
            net.Mod(
                input_blob, output_blob, divisor=self.modulo, sign_follow_divisor=True
            )
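
Below is a minimal usage sketch, not part of the file above. It assumes a LayerModelHelper-based layer model; the feature name sparse_ids, the categorical_limit of 100000, and the modulo of 1000 are illustrative placeholders.

import numpy as np

from caffe2.python import schema
from caffe2.python.layer_model_helper import LayerModelHelper

# Hypothetical input schema with a single IdList feature. The ids scalar
# carries Metadata so that the layer can read feature_specs/expected_value
# from it (and, if modulo were omitted, derive it from categorical_limit).
input_feature_schema = schema.Struct(
    ('sparse_ids', schema.List(
        schema.Scalar(
            np.int64,
            metadata=schema.Metadata(categorical_limit=100000),
        )
    )),
)

model = LayerModelHelper(
    "sparse_hash_example",
    input_feature_schema,
    trainer_extra_schema=schema.Struct(),
)

# Remap the raw ids into [0, 1000). With use_hashing=True the layer emits an
# IndexHash op, otherwise a Mod op; the ops are added to the nets when the
# layer model is later instantiated.
hashed_ids = model.SparseFeatureHash(
    model.input_feature_schema.sparse_ids,
    modulo=1000,
    use_hashing=True,
)
# hashed_ids is an IdList record whose items lie in [0, 1000).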