#! /bin/sh
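# Build KoichiYasuoka/open-calm-1b-ud-causal from cyberagent/open-calm-1b:
# fetch UD_Japanese-GSDLUW, rebuild the tokenizer's BPE merges, then
# fine-tune the model for UD token classification with DeepSpeed.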
S=cyberagent/open-calm-1b
T=KoichiYasuoka/open-calm-1b-ud-causal
U=https://github.com/UniversalDependencies/UD_Japanese-GSDLUW
D=`basename $U`
test -d $D || git clone --depth=1 $U
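# copy the train/dev/test splits into the working directory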
for F in train dev test
do cp $D/*-$F.conllu $F.conllu
done

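# TMPA rewrites tokenizer.json so that two-character words attested in the
# treebank survive retokenization as single tokens.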
TMPA=./maker$$a.py
( echo '#! /usr/bin/python3'
  echo 'src="'$S'"'
  cat << 'EOF'
import json,unicodedata
from transformers import AutoTokenizer
tkz=AutoTokenizer.from_pretrained(src,cls_token="<|endoftext|>",sep_token="<|endoftext|>",mask_token="<|endoftext|>",model_max_length=2048)
tkz.save_pretrained("tmpdir")
d=json.loads(tkz.backend_tokenizer.to_str())
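# collect every word form (FORM column) attested in train.conllu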
form=set()
with open("train.conllu","r",encoding="utf-8") as r:
  for s in r:
    w=s.split("\t")
    if len(w)==10 and w[0].isdecimal():
      form.add(w[1])
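# keep only short merges, then append a merge for every two-character
# treebank word (not starting with hiragana) whose two characters each
# encode to a single token; the merged token already exists in the vocab,
# so such words become single tokens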
m=[t for t in d["model"]["merges"] if len(t)<5]
for i in range(len(tkz)):
  w=tkz.decode(i)
  if len(w)==2 and w in form and not unicodedata.name(w[0],"").startswith("HIRAGANA"):
    k=tkz([w[0],w[1]],add_special_tokens=False)["input_ids"]
    if len(k[0])==1 and len(k[1])==1:
      m.append(" ".join(tkz.convert_ids_to_tokens([k[0][0],k[1][0]])))
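# write the rebuilt merge list back and save as tokenizer.json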
d["model"]["merges"]=m
tkz.backend_tokenizer.from_str(json.dumps(d)).save("tmpdir/tokenizer.json")
EOF
) > $TMPA
chmod 755 $TMPA
$TMPA

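# TMPB fine-tunes the model as a token classifier, launched via deepspeed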
TMPB=./maker$$b.py
( echo '#! /usr/bin/env deepspeed'
  echo 'src="'$S'"'
  echo 'tgt="'$T'"'
  cat << 'EOF'
import torch
from transformers import PreTrainedTokenizerFast,AutoConfig,GPTNeoXForTokenClassification,DefaultDataCollator,TrainingArguments,Trainer

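# UDCausalDataset yields one POS-tagging example per sentence (t==0) and
# one dependency example per word (t>0); labels combine UPOS (optionally
# with FEATS) and, for dependency examples, arc direction plus DEPREL.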
class UDCausalDataset(object):
  def __init__(self,conllu,tokenizer,embeddings=None):
    self.conllu=open(conllu,"r",encoding="utf-8")
    self.tokenizer=tokenizer
    self.embeddings=embeddings
    self.max_tokens=3
    self.seeks=[(0,0)]
    label=set(["SYM"])
    dep=set()
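    # one pass over the file: record a seek position per sentence and per
    # word, track the longest sentence, and collect the label inventory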
    s=self.conllu.readline()
    while s!="":
      if s=="\n":
        self.seeks.append((self.conllu.tell(),0))
      else:
        w=s.split("\t")
        if len(w)==10:
          if w[0].isdecimal():
            p=w[3] if w[5]=="_" else w[3]+"|"+w[5]
            label.add(p)
            dep.add(p+("|" if w[6]=="0" else "|l-" if int(w[0])<int(w[6]) else "|r-")+w[7])
            self.seeks.append((self.seeks[-1][0],int(w[0])))
            self.max_tokens=max(self.max_tokens,int(w[0])*2+1)
      s=self.conllu.readline()
    lid={}
    for i,l in enumerate(sorted(label)):
      lid[l],lid["B-"+l],lid["I-"+l]=i*3,i*3+1,i*3+2
    for i,d in enumerate(sorted(dep),len(lid)):
      lid[d]=i
    self.label2id=lid
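  # calling the train dataset with the dev/test datasets merges their
  # label inventories into one shared label2id (no explicit self, so
  # args receives every dataset, this one included)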
  def __call__(*args):
    lid={l:i for i,l in enumerate(sorted(set(sum([list(t.label2id) for t in args],[]))))}
    for t in args:
      t.label2id=lid
    return lid
  def __del__(self):
    self.conllu.close()
  __len__=lambda self:len(self.seeks)-1
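  # one example: re-read the sentence at the stored offset, then build
  # either a POS-tagging example or a dependency example for word t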
  def __getitem__(self,i):
    s,t=self.seeks[i]
    self.conllu.seek(s)
    form,upos,deps,w=[],[],[],[""]
    while w[0]!="\n":
      w=self.conllu.readline().split("\t")
      if len(w)==10:
        form.append(w[1])
        if w[0].isdecimal():
          upos.append(w[3] if w[5]=="_" else w[3]+"|"+w[5])
          deps.append((int(w[6]),w[7]))
    v=self.tokenizer(form,add_special_tokens=False)
    if t==0:
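      # POS tagging over subword ids, with B-/I- labels for words that
      # split into several tokens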
      i,u=[],[]
      for j,(x,y) in enumerate(zip(v["input_ids"],upos)):
        if x!=[]:
          i+=x
          u+=[y] if len(x)==1 else ["B-"+y]+["I-"+y]*(len(x)-1)
      emb=self.embeddings
      pad=self.tokenizer.pad_token_id
    else:
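      # dependency pass over word-level rows (summed subword embeddings,
      # unk for words the tokenizer drops), followed by sep; word t and
      # the words after it are appended again with labels encoding arcs
      # to and from word t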
      m=[]
      for x in v["input_ids"]:
        if x==[]:
          m.append(self.embeddings[self.tokenizer.unk_token_id,:])
        else:
          m.append(self.embeddings[x,:].sum(axis=0))
      m.append(self.embeddings[self.tokenizer.sep_token_id,:])
      m.append(self.embeddings[self.tokenizer.pad_token_id,:])
      emb=torch.stack(m)
      i,u=list(range(len(upos)+1)),upos+["SYM"]
      i.append(t-1)
      k,d=deps[t-1]
      u.append(upos[t-1]+"|"+d if k==0 else upos[t-1])
      for j in range(t,len(upos)):
        i.append(j)
        a,b=deps[j]
        u.append(upos[j]+"|r-"+b if a==t else upos[t-1]+"|l-"+d if j+1==k else upos[j])
      pad=-1
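    # pad (with SYM labels) or truncate to max_tokens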
    j=self.max_tokens-len(i)
    if j>0:
      ids=i+[pad]*j
      upos=u+["SYM"]*j
    else:
      ids=i[0:self.max_tokens]
      upos=u[0:self.max_tokens]
    return {"inputs_embeds":emb[ids,:],"labels":[self.label2id[p] for p in upos]}

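# build the datasets with the rebuilt tokenizer and share one label2id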
tkz=PreTrainedTokenizerFast.from_pretrained("tmpdir")
trainDS=UDCausalDataset("train.conllu",tkz)
devDS=UDCausalDataset("dev.conllu",tkz)
testDS=UDCausalDataset("test.conllu",tkz)
lid=trainDS(devDS,testDS)
cfg=AutoConfig.from_pretrained(src,num_labels=len(lid),label2id=lid,id2label={i:l for l,i in lid.items()},ignore_mismatched_sizes=True)
mdl=GPTNeoXForTokenClassification.from_pretrained(src,config=cfg,ignore_mismatched_sizes=True)
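# the dataset indexes straight into the model's input embedding matrix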
trainDS.embeddings=mdl.get_input_embeddings().weight
trainDS.max_tokens=min(trainDS.max_tokens,cfg.max_position_embeddings)
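# DeepSpeed ZeRO stage-3 config, offloading optimizer and parameters to CPU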
dsp={"fp16":{"enabled":"auto"},
     "optimizer":{"type":"AdamW"},
     "scheduler":{"type":"WarmupLR","params":{}},
     "train_batch_size":"auto",
     "train_micro_batch_size_per_gpu":"auto",
     "zero_optimization":{"stage":3,
       "offload_optimizer":{"device":"cpu","pin_memory":True},
       "offload_param":{"device":"cpu","pin_memory":True},
       "overlap_comm":True,
       "contiguous_gradients":True,
       "reduce_bucket_size":"auto",
       "stage3_prefetch_bucket_size":"auto",
       "stage3_param_persistence_threshold":"auto",
       "stage3_gather_16bit_weights_on_model_save":True}}
arg=TrainingArguments(num_train_epochs=3,per_device_train_batch_size=16,deepspeed=dsp,output_dir=tgt,overwrite_output_dir=True,save_total_limit=2,learning_rate=5e-05,warmup_ratio=0.1,save_safetensors=False)
trn=Trainer(args=arg,data_collator=DefaultDataCollator(),model=mdl,train_dataset=trainDS)
trn.train()
trn.save_model(tgt)
tkz.save_pretrained(tgt)
EOF
) > $TMPB
chmod 755 $TMPB
$TMPB
exit