forked from zhaocq-nlp/NJUNMT-tf
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: infer.sh
35 lines (31 loc) · 870 Bytes
/
infer.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
#!/usr/bin/env bash
#
# Run NJUNMT-tf beam-search inference (bin.infer) over the toy Zh->En test set.
# Expects a trained model under MODEL_DIR and vocab/test files under DATA_DIR.
# Translations are written to ./mt02.trans and ./heheda (see --infer_data below).
set -euo pipefail

# Intentionally empty: hide all GPUs so inference runs on CPU only.
export CUDA_VISIBLE_DEVICES=

readonly DATA_DIR=./testdata
readonly MODEL_DIR=./test_models
readonly VOCAB_SOURCE=${DATA_DIR}/vocab.zh
readonly VOCAB_TARGET=${DATA_DIR}/vocab.en
readonly BATCH_SIZE=13
readonly BEAM_SIZE=5
readonly DELIMITER=" "                # token separator used when splitting/joining text
readonly MAXIMUM_LABELS_LENGTH=30     # decoding stops after this many target tokens
readonly CHAR_LEVEL=false             # word-level (not character-level) processing

# --infer and --infer_data take inline YAML strings that bin.infer parses.
# source_bpecodes / target_bpecodes are deliberately empty: no BPE segmentation.
python -m bin.infer \
  --model_dir "${MODEL_DIR}" \
  --infer "
batch_size: ${BATCH_SIZE}
beam_size: ${BEAM_SIZE}
maximum_labels_length: ${MAXIMUM_LABELS_LENGTH}
delimiter: ${DELIMITER}
source_words_vocabulary: ${VOCAB_SOURCE}
target_words_vocabulary: ${VOCAB_TARGET}
source_bpecodes:
target_bpecodes:
char_level: ${CHAR_LEVEL}" \
  --infer_data "
- features_file: ${DATA_DIR}/toy.zh
  output_file: ./mt02.trans
  output_attention: true
- features_file: ${DATA_DIR}/toy.zh
  labels_file: ${DATA_DIR}/mt02.ref
  output_file: ./heheda
  output_attention: false"