lm_file=example/example.trigram.lm.gz
tm_file=example/example.hiero.tm.gz
tm_format=hiero
glue_file=grammars/hiero.glue
glue_format=hiero
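#for reference, rules in a hiero-format tm file and glue file look roughly like
#the lines below (illustrative only; these values are not copied from
#example.hiero.tm.gz or grammars/hiero.glue, and each rule carries one score per
#phrasemodel weight listed at the bottom of this file):
#  [X] ||| [X,1] maison ||| [X,1] house ||| 0.5 0.2 0.3
#  S ||| [S,1] [X,2] ||| [S,1] [X,2] ||| 0.0 0.0 0.0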
#lm config
use_srilm=true
lm_ceiling_cost=100
use_left_equivalent_state=false
use_right_equivalent_state=false
order=3
#tm config
span_limit=10
phrase_owner=pt
mono_owner=mono
begin_mono_owner=begin_mono
default_non_terminal=X
goalSymbol=S
#pruning config
fuzz1=0.1
fuzz2=0.1
max_n_items=30
relative_threshold=10.0
max_n_rules=50
rule_relative_threshold=10.0
#nbest config
use_unique_nbest=true
use_tree_nbest=false
add_combined_cost=true
top_n=300
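#each n-best entry is written in the usual Moses-style layout (approximate;
#exact field contents depend on the Joshua version):
#  sentence_id ||| hypothesis ||| feature scores ||| combined score
#the trailing combined score is appended only when add_combined_cost=true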
#remote lm server config: the remote_symbol_tbl must be prepared before starting any jobs
use_remote_lm_server=false
remote_symbol_tbl=./voc.remote.sym
num_remote_lm_servers=4
f_remote_server_list=./remote.lm.server.list
remote_lm_server_port=9000
#parallel decoder: cannot be used together with the remote lm server
num_parallel_decoders=1
parallel_files_prefix=/tmp/
###### model weights
#lm weight
lm 1.000000
#phrasemodel owner column (0-indexed) weight
phrasemodel pt 0 1.066893
phrasemodel pt 1 0.752247
phrasemodel pt 2 0.589793
#arityphrasepenalty owner start_arity end_arity weight
#arityphrasepenalty pt 0 0 1.0
#arityphrasepenalty pt 1 2 -1.0
#phrasemodel mono 0 0.5
#wordpenalty weight
wordpenalty -2.844814
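#a typical invocation with this config (a sketch; the memory setting, classpath,
#and file names are placeholders that depend on your Joshua installation):
#  java -Xmx1g -cp $JOSHUA/bin joshua.decoder.JoshuaDecoder example.config input.src output.nbest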