diff --git a/.idea/vcs.xml b/.idea/vcs.xml
deleted file mode 100644
index 94a25f7..0000000
--- a/.idea/vcs.xml
+++ /dev/null
@@ -1,6 +0,0 @@
-
-
-
-
-
-
\ No newline at end of file
diff --git a/README.md b/README.md
index 5254b61..1dc515a 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,7 @@
 DeepAA
 ====
 
-This is convolutional neural networks generating ASCII art.
+This is a convolutional neural network that generates ASCII art from an image.
 This repository is under construction.
 
 This work is accepted by [NIPS 2017 Workshop, Machine Learning for Creativity and Design](https://nips2017creativity.github.io/)
@@ -28,7 +28,7 @@ The paper: [ASCII Art Synthesis with Convolutional Networks](https://nips2017cre
 )
 
 ## How to use
-please change the line 15 of `output.py `
+Please change line 15 of `output.py`
 ```
 image_path = 'sample images/original images/21 original.png' # put the path of the image that you convert.
 ```
@@ -36,10 +36,10 @@ image_path = 'sample images/original images/21 original.png' # put the path of t
 into the path of image file that you use. You should use a grayscale line image.
 
-then run `output.py `.
-converted images will be output at `output/ `.
+Then run `output.py`.
+Converted images will be written to `output/`.
 
-You can select light model by change the line 13, 14 of `output.py ` into
+You can select the light model by changing lines 13 and 14 of `output.py` to
 ```
 model_path = "model/model_light.json"
 weight_path = "model/weight_light.hdf5"
diff --git a/output.py b/output.py
index ea1bfcf..6fa8da3 100644
--- a/output.py
+++ b/output.py
@@ -43,7 +43,7 @@ def pickleload(path):
 print("len(char_list)", len(char_list))
 # print(char_list.head())
 char_list = char_list[char_list['frequency']>=10]
-char_list = char_list['char'].as_matrix()
+char_list = char_list['char'].to_numpy()
 
 for k, v in enumerate(char_list):
     if v==" ":
@@ -125,4 +125,4 @@ def pickleload(path):
 
     f=open(save_path[:-4] + '.txt', 'w')
     f.writelines(text)
-    f.close()
\ No newline at end of file
+    f.close()
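
For context on the `output.py` hunk: `Series.as_matrix()` was deprecated in pandas 0.23 and removed in pandas 1.0, and `Series.to_numpy()` is the supported replacement. Below is a minimal sketch of the same pattern, using made-up data in place of the repository's character-frequency table.

```python
import pandas as pd

# Hypothetical stand-in for the character-frequency table loaded in output.py.
char_list = pd.DataFrame({"char": ["a", "b", " "], "frequency": [12, 3, 40]})

# Same filtering step as in output.py: keep characters seen at least 10 times.
char_list = char_list[char_list["frequency"] >= 10]

# Old (removed in pandas 1.0): char_list["char"].as_matrix()
# New (pandas >= 0.24): convert the Series to a NumPy array.
chars = char_list["char"].to_numpy()
print(chars)  # ['a' ' ']
```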