Franco Stratta committed on
Commit
59f27e2
·
1 Parent(s): 049d4a0

Added files

Browse files
Files changed (4) hide show
  1. app.ipynb +311 -0
  2. app.py +30 -4
  3. settings.ini +4 -0
  4. sex.pkl +3 -0
app.ipynb ADDED
@@ -0,0 +1,311 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 92,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "#|default_exp app"
10
+ ]
11
+ },
12
+ {
13
+ "cell_type": "code",
14
+ "execution_count": 93,
15
+ "metadata": {},
16
+ "outputs": [],
17
+ "source": [
18
+ "#|export\n",
19
+ "from fastai.vision.all import *\n",
20
+ "import gradio as gr\n",
21
+ "\n",
22
+ "def is_man(x): return x[0].isupper()"
23
+ ]
24
+ },
25
+ {
26
+ "cell_type": "code",
27
+ "execution_count": 94,
28
+ "metadata": {},
29
+ "outputs": [],
30
+ "source": [
31
+ "girl1 = PILImage.create('/Users/francostratta/Desktop/learning/girl1.jpeg')\n",
32
+ "girl2 = PILImage.create('/Users/francostratta/Desktop/learning/girl2.jpg')\n",
33
+ "girl3 = PILImage.create('/Users/francostratta/Desktop/learning/girl3.jpg')\n",
34
+ "\n",
35
+ "man1 = PILImage.create('/Users/francostratta/Desktop/learning/man1.jpeg')\n",
36
+ "man2 = PILImage.create('/Users/francostratta/Desktop/learning/man2.jpg')\n",
37
+ "man3 = PILImage.create('/Users/francostratta/Desktop/learning/man3.jpg')"
38
+ ]
39
+ },
40
+ {
41
+ "cell_type": "code",
42
+ "execution_count": 95,
43
+ "metadata": {},
44
+ "outputs": [],
45
+ "source": [
46
+ "#|export\n",
47
+ "learn = load_learner('sex.pkl')"
48
+ ]
49
+ },
50
+ {
51
+ "cell_type": "code",
52
+ "execution_count": 96,
53
+ "metadata": {},
54
+ "outputs": [
55
+ {
56
+ "data": {
57
+ "text/html": [
58
+ "\n",
59
+ "<style>\n",
60
+ " /* Turns off some styling */\n",
61
+ " progress {\n",
62
+ " /* gets rid of default border in Firefox and Opera. */\n",
63
+ " border: none;\n",
64
+ " /* Needs to be in here for Safari polyfill so background images work as expected. */\n",
65
+ " background-size: auto;\n",
66
+ " }\n",
67
+ " progress:not([value]), progress:not([value])::-webkit-progress-bar {\n",
68
+ " background: repeating-linear-gradient(45deg, #7e7e7e, #7e7e7e 10px, #5c5c5c 10px, #5c5c5c 20px);\n",
69
+ " }\n",
70
+ " .progress-bar-interrupted, .progress-bar-interrupted::-webkit-progress-bar {\n",
71
+ " background: #F44336;\n",
72
+ " }\n",
73
+ "</style>\n"
74
+ ],
75
+ "text/plain": [
76
+ "<IPython.core.display.HTML object>"
77
+ ]
78
+ },
79
+ "metadata": {},
80
+ "output_type": "display_data"
81
+ },
82
+ {
83
+ "data": {
84
+ "text/html": [],
85
+ "text/plain": [
86
+ "<IPython.core.display.HTML object>"
87
+ ]
88
+ },
89
+ "metadata": {},
90
+ "output_type": "display_data"
91
+ },
92
+ {
93
+ "data": {
94
+ "text/plain": [
95
+ "('women', tensor(1), tensor([6.9461e-05, 9.9993e-01]))"
96
+ ]
97
+ },
98
+ "execution_count": 96,
99
+ "metadata": {},
100
+ "output_type": "execute_result"
101
+ }
102
+ ],
103
+ "source": [
104
+ "learn.predict(girl1)"
105
+ ]
106
+ },
107
+ {
108
+ "cell_type": "code",
109
+ "execution_count": 97,
110
+ "metadata": {},
111
+ "outputs": [],
112
+ "source": [
113
+ "#|export\n",
114
+ "categories = (\"man\", \"girl\")\n",
115
+ "\n",
116
+ "def classify_image(img):\n",
117
+ " pred, idx, probs = learn.predict(img)\n",
118
+ " return dict(zip(categories, map(float,probs)))"
119
+ ]
120
+ },
121
+ {
122
+ "cell_type": "code",
123
+ "execution_count": 98,
124
+ "metadata": {},
125
+ "outputs": [
126
+ {
127
+ "data": {
128
+ "text/html": [
129
+ "\n",
130
+ "<style>\n",
131
+ " /* Turns off some styling */\n",
132
+ " progress {\n",
133
+ " /* gets rid of default border in Firefox and Opera. */\n",
134
+ " border: none;\n",
135
+ " /* Needs to be in here for Safari polyfill so background images work as expected. */\n",
136
+ " background-size: auto;\n",
137
+ " }\n",
138
+ " progress:not([value]), progress:not([value])::-webkit-progress-bar {\n",
139
+ " background: repeating-linear-gradient(45deg, #7e7e7e, #7e7e7e 10px, #5c5c5c 10px, #5c5c5c 20px);\n",
140
+ " }\n",
141
+ " .progress-bar-interrupted, .progress-bar-interrupted::-webkit-progress-bar {\n",
142
+ " background: #F44336;\n",
143
+ " }\n",
144
+ "</style>\n"
145
+ ],
146
+ "text/plain": [
147
+ "<IPython.core.display.HTML object>"
148
+ ]
149
+ },
150
+ "metadata": {},
151
+ "output_type": "display_data"
152
+ },
153
+ {
154
+ "data": {
155
+ "text/html": [],
156
+ "text/plain": [
157
+ "<IPython.core.display.HTML object>"
158
+ ]
159
+ },
160
+ "metadata": {},
161
+ "output_type": "display_data"
162
+ },
163
+ {
164
+ "name": "stdout",
165
+ "output_type": "stream",
166
+ "text": [
167
+ "girl1: {'man': 6.946067151147872e-05, 'girl': 0.9999305009841919}\n"
168
+ ]
169
+ },
170
+ {
171
+ "data": {
172
+ "text/html": [
173
+ "\n",
174
+ "<style>\n",
175
+ " /* Turns off some styling */\n",
176
+ " progress {\n",
177
+ " /* gets rid of default border in Firefox and Opera. */\n",
178
+ " border: none;\n",
179
+ " /* Needs to be in here for Safari polyfill so background images work as expected. */\n",
180
+ " background-size: auto;\n",
181
+ " }\n",
182
+ " progress:not([value]), progress:not([value])::-webkit-progress-bar {\n",
183
+ " background: repeating-linear-gradient(45deg, #7e7e7e, #7e7e7e 10px, #5c5c5c 10px, #5c5c5c 20px);\n",
184
+ " }\n",
185
+ " .progress-bar-interrupted, .progress-bar-interrupted::-webkit-progress-bar {\n",
186
+ " background: #F44336;\n",
187
+ " }\n",
188
+ "</style>\n"
189
+ ],
190
+ "text/plain": [
191
+ "<IPython.core.display.HTML object>"
192
+ ]
193
+ },
194
+ "metadata": {},
195
+ "output_type": "display_data"
196
+ },
197
+ {
198
+ "data": {
199
+ "text/html": [],
200
+ "text/plain": [
201
+ "<IPython.core.display.HTML object>"
202
+ ]
203
+ },
204
+ "metadata": {},
205
+ "output_type": "display_data"
206
+ },
207
+ {
208
+ "name": "stdout",
209
+ "output_type": "stream",
210
+ "text": [
211
+ "man1: {'man': 0.9999998807907104, 'girl': 1.692434494771078e-07}\n"
212
+ ]
213
+ }
214
+ ],
215
+ "source": [
216
+ "print('girl1: ',classify_image(girl1))\n",
217
+ "print('man1: ',classify_image(man1))"
218
+ ]
219
+ },
220
+ {
221
+ "cell_type": "code",
222
+ "execution_count": 99,
223
+ "metadata": {},
224
+ "outputs": [
225
+ {
226
+ "name": "stdout",
227
+ "output_type": "stream",
228
+ "text": [
229
+ "Running on local URL: http://127.0.0.1:7869\n",
230
+ "\n",
231
+ "To create a public link, set `share=True` in `launch()`.\n"
232
+ ]
233
+ },
234
+ {
235
+ "data": {
236
+ "text/html": [
237
+ "<div><iframe src=\"http://127.0.0.1:7869/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
238
+ ],
239
+ "text/plain": [
240
+ "<IPython.core.display.HTML object>"
241
+ ]
242
+ },
243
+ "metadata": {},
244
+ "output_type": "display_data"
245
+ },
246
+ {
247
+ "data": {
248
+ "text/plain": []
249
+ },
250
+ "execution_count": 99,
251
+ "metadata": {},
252
+ "output_type": "execute_result"
253
+ }
254
+ ],
255
+ "source": [
256
+ "#|export\n",
257
+ "Image = gr.Image()\n",
258
+ "label = gr.Label()\n",
259
+ "examples = ['girl1.jpeg', 'man1.jpeg']\n",
260
+ "\n",
261
+ "interface = gr.Interface(\n",
262
+ " fn=classify_image,\n",
263
+ " inputs=Image,\n",
264
+ " outputs=label,\n",
265
+ " examples=examples)\n",
266
+ "\n",
267
+ "interface.launch()"
268
+ ]
269
+ },
270
+ {
271
+ "cell_type": "code",
272
+ "execution_count": 100,
273
+ "metadata": {},
274
+ "outputs": [],
275
+ "source": [
276
+ "!nbdev_migrate "
277
+ ]
278
+ },
279
+ {
280
+ "cell_type": "code",
281
+ "execution_count": 101,
282
+ "metadata": {},
283
+ "outputs": [],
284
+ "source": [
285
+ "import nbdev\n",
286
+ "nbdev.export.nb_export('app.ipynb', '.')\n"
287
+ ]
288
+ }
289
+ ],
290
+ "metadata": {
291
+ "kernelspec": {
292
+ "display_name": "base",
293
+ "language": "python",
294
+ "name": "python3"
295
+ },
296
+ "language_info": {
297
+ "codemirror_mode": {
298
+ "name": "ipython",
299
+ "version": 3
300
+ },
301
+ "file_extension": ".py",
302
+ "mimetype": "text/x-python",
303
+ "name": "python",
304
+ "nbconvert_exporter": "python",
305
+ "pygments_lexer": "ipython3",
306
+ "version": "3.11.8"
307
+ }
308
+ },
309
+ "nbformat": 4,
310
+ "nbformat_minor": 2
311
+ }
app.py CHANGED
@@ -1,7 +1,33 @@
 
 
 
 
 
 
 
1
  import gradio as gr
2
 
3
- def greet(name):
4
- return "Hello " + name + "!!"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
 
6
- demo = gr.Interface(fn=greet, inputs="text", outputs="text")
7
- demo.launch()
 
1
+ # AUTOGENERATED! DO NOT EDIT! File to edit: app.ipynb.
2
+
3
+ # %% auto 0
4
+ __all__ = ['learn', 'categories', 'Image', 'label', 'examples', 'interface', 'is_man', 'classify_image']
5
+
6
+ # %% app.ipynb 1
7
+ from fastai.vision.all import *
8
  import gradio as gr
9
 
10
def is_man(x):
    """Return True when the first character of *x* is uppercase.

    Presumably used as a fastai label function over file names, where an
    uppercase-initial name marks the 'man' class — TODO confirm with the
    training data convention. Assumes *x* is a non-empty string; an empty
    string raises IndexError.
    """
    first = x[0]
    return first.isupper()
11
+
12
+ # %% app.ipynb 3
13
# Deserialize the exported fastai Learner from 'sex.pkl' (stored as a
# Git-LFS blob in this commit). NOTE(review): load_learner unpickles the
# file — only ever load model files from a trusted source.
learn = load_learner('sex.pkl')
14
+
15
+ # %% app.ipynb 5
16
# Display names for the two classes. NOTE(review): assumed to line up
# index-for-index with the probability vector from learn.predict — verify
# against the learner's vocab (an earlier notebook output shows the label
# 'women', not 'girl').
categories = ("man", "girl")

def classify_image(img):
    """Run the loaded learner on *img* and return a dict mapping each
    category name to its predicted probability as a plain float."""
    pred, idx, probs = learn.predict(img)
    return {name: float(p) for name, p in zip(categories, probs)}
21
+
22
+ # %% app.ipynb 7
23
# Gradio app wiring: an image input, a label output (shows the class
# probabilities returned by classify_image), and two bundled example
# images, then start the local server.
Image = gr.Image()
label = gr.Label()
examples = ['girl1.jpeg', 'man1.jpeg']

interface = gr.Interface(fn=classify_image, inputs=Image, outputs=label, examples=examples)
interface.launch()
 
settings.ini ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ [nbdev]
2
+ lib_name = app
3
+ nb_path = .
4
+ lib_path = .
sex.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0fa2a8ebc37b84410cf510bfd49dd7d1fdee4402daa656d65745b09ed421f1e6
3
+ size 87495200