This repository was archived by the owner on Dec 11, 2025. It is now read-only.

Commit 96a186e

Merge pull request #235 from didi/docker

ci and delta using tf2.3.0

2 parents: c5b8f8c + e521d13

31 files changed: +97 / -57 lines

.travis.yml

Lines changed: 1 addition & 1 deletion
@@ -6,7 +6,7 @@ services:
 before_install:
 - export DELTA_PATH=`pwd`; echo $DELTA_PATH
 - export DOCKER_DELTA="/home/gitlab-runner/delta"; echo $DOCKER_DELTA
-- export CI_IMAGE=zh794390558/delta:ci-cpu-py3
+- export CI_IMAGE=zh794390558/delta:2.3.0-ci-cpu-py3
 - docker pull ${CI_IMAGE}
 - docker run -it -d --name travis_con --user root -v ${DELTA_PATH}:${DOCKER_DELTA} ${CI_IMAGE} bash
 - docker exec travis_con bash -c "gcc -v && g++ -v"

README.md

Lines changed: 2 additions & 2 deletions
@@ -118,9 +118,9 @@ To verify the installation, run:
 
 ```shell
 # Activate conda environment
-conda activate delta-py3.6-tf2.0.0
+conda activate delta-py3.6-tf2.3.0
 # Or use the following command if your conda version is < 4.6
-# source activate delta-py3.6-tf2.0.0
+# source activate delta-py3.6-tf2.3.0
 
 # Add DELTA environment
 source env.sh
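Since this hunk sits under the README's "To verify the installation, run:" step, a quick sanity check of the renamed environment can help. A minimal sketch, assuming the delta-py3.6-tf2.3.0 env above is active (illustrative, not part of the diff):

```python
# Quick sanity check after `conda activate delta-py3.6-tf2.3.0` (illustrative).
import sys

import tensorflow as tf

print("python     :", sys.version.split()[0])  # expect 3.6.x
print("tensorflow :", tf.__version__)          # expect 2.3.0
assert tf.__version__.startswith("2.3"), "unexpected TensorFlow version"
```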

core/ops/Makefile

Lines changed: 1 addition & 1 deletion
@@ -8,7 +8,7 @@ endif
 #$(info $(MAKEFILE_DIR))
 #$(info $(MAIN_ROOT))
 
-CXX := g++
+CXX := g++-7
 NVCC := nvcc
 PYTHON_BIN_PATH= python3
 CC :=
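Pinning CXX to g++-7 presumably keeps the custom-op build close to the toolchain the TensorFlow 2.3.0 wheels were compiled with. As a hedged aside (not from this repo), the compiler and flags TensorFlow reports for building custom ops can be inspected and compared against the Makefile:

```python
# Hedged aside (not DELTA code): inspect the compiler and flags TensorFlow
# reports for building custom ops, to compare against CXX in core/ops/Makefile.
import tensorflow as tf

print("built with   :", tf.version.COMPILER_VERSION)
print("compile flags:", " ".join(tf.sysconfig.get_compile_flags()))
print("link flags   :", " ".join(tf.sysconfig.get_link_flags()))
```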

delta/data/feat/speech_ops_test.py

Lines changed: 3 additions & 3 deletions
@@ -209,13 +209,13 @@ def test_splice(self):
 self.assertTupleEqual(out.eval().shape, (1, 3, 2 * ctx))
 self.assertAllEqual(out, tf.ones([1, 3, 2 * ctx]))
 
-with self.assertRaises(ValueError):
+with self.assertRaises(tf.errors.InvalidArgumentError):
   out = tffeat.splice(feat, left_context=-2, right_context=-2).eval()
 
-with self.assertRaises(ValueError):
+with self.assertRaises(tf.errors.InvalidArgumentError):
   out = tffeat.splice(feat, left_context=2, right_context=-2).eval()
 
-with self.assertRaises(ValueError):
+with self.assertRaises(tf.errors.InvalidArgumentError):
   out = tffeat.splice(feat, left_context=-2, right_context=2).eval()
 
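The assertion change suggests the invalid left/right context is now rejected while the splice op executes, surfacing as tf.errors.InvalidArgumentError rather than a Python ValueError at graph-construction time. A hedged, standalone illustration of the same pattern with a generic run-time check (not tffeat.splice):

```python
# Hedged illustration (not DELTA code): a check that fails while the op runs
# surfaces as tf.errors.InvalidArgumentError, not as a Python ValueError.
import tensorflow.compat.v1 as tf

tf.disable_v2_behavior()

x = tf.placeholder(tf.float32, shape=[None])
checked = tf.debugging.assert_positive(x)  # validated at run time, not build time

with tf.Session() as sess:
  try:
    sess.run(checked, feed_dict={x: [-1.0]})
  except tf.errors.InvalidArgumentError as err:
    print("caught at run time:", type(err).__name__)
```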

delta/data/preprocess/base_preparer.py

Lines changed: 2 additions & 2 deletions
@@ -144,7 +144,7 @@ def prepare_one_raw_data(self, one_path, one_path_after, mode,
 if self.multi_output:
   for i in range(self.output_num):
     label_ds = label[i].batch(self.batch_size)
-    label_iterator = label_ds.make_initializable_iterator()
+    label_iterator = tf.data.make_initializable_iterator(label_ds)
     label_after_arr = self.run_dataset(label_iterator, batch_num)
     label_after_one = [
         one_line.decode("utf-8") for one_line in label_after_arr
@@ -154,7 +154,7 @@ def prepare_one_raw_data(self, one_path, one_path_after, mode,
 else:
   label = label[0]
   label_ds = label.batch(self.batch_size)
-  label_iterator = label_ds.make_initializable_iterator()
+  label_iterator = tf.data.make_initializable_iterator(label_ds)
   label_after_arr = self.run_dataset(label_iterator, batch_num)
   one_label_after = [
       one_line.decode("utf-8") for one_line in label_after_arr
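In TF 2.3 the dataset object no longer exposes make_initializable_iterator(); the module-level helper is used instead. A minimal sketch of the migrated pattern, assuming the TF1-compat graph mode this preprocessing code runs under (toy data, not DELTA code):

```python
# Hedged sketch (toy data): the iterator migration used throughout this commit,
# dataset.make_initializable_iterator() -> tf.data.make_initializable_iterator(dataset).
import tensorflow.compat.v1 as tf

tf.disable_v2_behavior()

label_ds = tf.data.Dataset.from_tensor_slices(["pos", "neg", "pos"]).batch(2)

# Before the TF 2.x removal: label_ds.make_initializable_iterator()
label_iterator = tf.data.make_initializable_iterator(label_ds)
next_batch = label_iterator.get_next()

with tf.Session() as sess:
  sess.run(label_iterator.initializer)
  print(sess.run(next_batch))  # [b'pos' b'neg']
```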

delta/data/preprocess/text_ops.py

Lines changed: 1 addition & 1 deletion
@@ -101,7 +101,7 @@ def load_textline_dataset(paths, column_num):
 """Load raw data for text task."""
 ds = tf.data.TextLineDataset(paths)
 ds = ds.map(
-    lambda x: tf.strings.split(x, sep="\t", result_type="RaggedTensor"))
+    lambda x: tf.squeeze(tf.strings.split(x, sep="\t", result_type="RaggedTensor"), axis = 0))
 ds = ds.filter(lambda line: tf.equal(tf.size(line), column_num))
 ds_list = []
 for i in range(column_num):
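The added tf.squeeze(..., axis=0) drops the leading per-line dimension that the compat-style split with result_type="RaggedTensor" evidently produces under TF 2.3, so the tf.size filter on the next line still counts fields per line. For contrast, a small eager-mode sketch (illustrative only) showing that plain TF 2.3 tf.strings.split already yields a flat result for a scalar line:

```python
# Illustrative only: plain TF 2.3 eager split of a single tab-separated line.
import tensorflow as tf  # assumed 2.3.0, eager mode

line = tf.constant("label_a\ttext one")
fields = tf.strings.split(line, sep="\t")
print(fields.numpy())  # [b'label_a' b'text one'] -- already rank 1, no batch dim
```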

delta/data/preprocess/text_ops_test.py

Lines changed: 2 additions & 2 deletions
@@ -121,7 +121,7 @@ def test_process_one_label_dataset(self):
 true_res = [0, 0, 0, 8]
 label_ds = process_one_label_dataset(label_ds, self.config)
 
-iterator = label_ds.make_initializable_iterator()
+iterator = tf.data.make_initializable_iterator(label_ds)
 label_res = iterator.get_next()
 
 with tf.Session() as sess:
@@ -139,7 +139,7 @@ def test_process_multi_label_dataset(self):
 label_ds = tf.data.TextLineDataset(label_filepath)
 true_res = [[0, 8, 8], [0, 7, 8]]
 label_ds = process_multi_label_dataset(label_ds, self.config)
-iterator = label_ds.make_initializable_iterator()
+iterator = tf.data.make_initializable_iterator(label_ds)
 label_res = iterator.get_next()
 
 with tf.Session() as sess:
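These tests still drive the pipeline through a Session, so the migrated iterator is consumed the same way as before: run its initializer, then fetch get_next() until the data is exhausted. A hedged sketch of that consumption loop with toy data (not the test fixtures):

```python
# Hedged sketch (toy data): how the tests consume the migrated iterator.
import tensorflow.compat.v1 as tf

tf.disable_v2_behavior()

label_ds = tf.data.Dataset.from_tensor_slices([0, 0, 0, 8]).batch(2)
iterator = tf.data.make_initializable_iterator(label_ds)
label_res = iterator.get_next()

with tf.Session() as sess:
  sess.run(iterator.initializer)
  while True:
    try:
      print(sess.run(label_res))  # [0 0] then [0 8]
    except tf.errors.OutOfRangeError:
      break
```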

delta/data/preprocess/utils.py

Lines changed: 1 addition & 1 deletion
@@ -37,7 +37,7 @@ def get_pre_process_text_ds_iter(
 
 text_ds = text_ds.batch(batch_size)
 
-iterator = text_ds.make_initializable_iterator()
+iterator = tf.data.make_initializable_iterator(text_ds)
 
 return iterator

delta/data/task/text_cls_task.py

Lines changed: 1 addition & 1 deletion
@@ -184,7 +184,7 @@ def dataset(self):
 
 data_set = data_set.prefetch(self.num_prefetch_batch)
 
-iterator = data_set.make_initializable_iterator()
+iterator = tf.data.make_initializable_iterator(data_set)
 
 # pylint: disable=unused-variable
 if self.infer_without_label:

delta/data/task/text_match_task.py

Lines changed: 1 addition & 1 deletion
@@ -170,7 +170,7 @@ def dataset(self):
 
 text_ds_left_right = text_ds_left_right.prefetch(self.num_prefetch_batch)
 
-iterator = text_ds_left_right.make_initializable_iterator()
+iterator = tf.data.make_initializable_iterator(text_ds_left_right)
 # pylint: disable=unused-variable
 if self.infer_without_label:
   (input_x_left, input_x_right), (input_x_left_len,

0 commit comments
