jpohhhh committed on
Commit
29a469a
1 Parent(s): a154b3b

Try skipping normalization


Completely off-topic embeddings got a slight relevance increase (0.01 -> 0.03) after
switching to the philschmid ONNX export; curious whether normalization is the cause.

Files changed (1)
  1. handler.py +0 -2
handler.py CHANGED
@@ -33,7 +33,5 @@ class EndpointHandler():
         outputs = self.model(**encoded_inputs)
         # Perform pooling
         sentence_embeddings = mean_pooling(outputs, encoded_inputs['attention_mask'])
-        # Normalize embeddings
-        sentence_embeddings = F.normalize(sentence_embeddings, p=2, dim=1)
         # postprocess the prediction
         return sentence_embeddings.tolist()
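
For context on why dropping the F.normalize call can move relevance numbers: L2 normalization keeps only the direction of each embedding, so dot-product scores become cosine similarities; without it, scores also pick up vector magnitude. A minimal sketch of the difference, using toy vectors rather than the model's real outputs (the downstream scoring function is an assumption, not something this handler defines):

import torch
import torch.nn.functional as F

# Toy stand-ins for two mean-pooled sentence embeddings (hypothetical values).
a = torch.tensor([[3.0, 4.0]])
b = torch.tensor([[0.3, 0.4]])

# With the removed step (L2 normalization), a plain dot product equals
# cosine similarity: only direction matters.
a_n = F.normalize(a, p=2, dim=1)
b_n = F.normalize(b, p=2, dim=1)
print((a_n * b_n).sum().item())  # 1.0 -- identical direction

# Without normalization, the same dot product is scaled by the vectors'
# norms, so magnitude leaks into the score.
print((a * b).sum().item())      # 2.5 -- direction * magnitude

If the retrieval side scores with cosine similarity, skipping normalization is a no-op; if it scores with raw dot products, magnitude differences could account for shifts like the 0.01 -> 0.03 noted above.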