Commit e390f3d3, authored 3 years ago by yong hee won

Commit message: readme
Parent commit: 7abd6412
Showing 3 changed files
- README.md: +14 −0 (14 additions, 0 deletions)
- loss_function copy.ipynb: +64 −188 (64 additions, 188 deletions)
- loss_function.ipynb: +35 −25 (35 additions, 25 deletions)

with 113 additions and 213 deletions
README.md 0 → 100644 (+14 −0)

# Loss function custom
## environment
- miniforge
- tensorflow 2.8.0
- matplotlib
## dataset
- mnist dataset
### Loss function
- categorical cross entropy: loss function for categorical data
\ No newline at end of file
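For reference, the categorical cross entropy that both notebooks implement by hand as `ccee` is the batch-averaged cross entropy between one-hot labels $y$ and predicted probabilities $\hat{y}$; the small $\delta$ ($10^{-7}$ in the code) guards against $\log 0$. This restatement of the formula is not part of the committed README, with $N$ the batch size and $C$ the number of classes:

$$
\mathcal{L}_{\mathrm{CCE}} \;=\; -\frac{1}{N}\sum_{i=1}^{N}\sum_{c=1}^{C} y_{i,c}\,\log\!\left(\hat{y}_{i,c} + \delta\right)
$$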
loss_function copy.ipynb (+64 −188)
...
...
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": 2,
    "metadata": {},
    "outputs": [],
    "source": [
...
...
@@ -12,31 +12,22 @@
    "from keras.models import Sequential\n",
    "from keras.layers import Dense, Dropout, Flatten\n",
    "from keras.layers.convolutional import Conv2D, MaxPooling2D\n",
    "from keras.utils import np_utils\n",
    "import numpy as np"
   ]
  },
  {
   "cell_type": "code",
-   "execution_count": 4,
+   "execution_count": 3,
   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/mnist.npz\n",
-      "11493376/11490434 [==============================] - 2s 0us/step\n",
-      "11501568/11490434 [==============================] - 2s 0us/step\n"
-     ]
-    }
-   ],
+   "outputs": [],
   "source": [
    "(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()"
   ]
  },
  {
   "cell_type": "code",
-   "execution_count": 15,
+   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
...
...
@@ -61,247 +52,132 @@
    "\n",
    "print('x_train shape:', x_train.shape)\n",
    "print(x_train.shape[0], 'train samples')\n",
-   "print(x_test.shape[0], 'test samples')\n",
-   "\n"
+   "print(x_test.shape[0], 'test samples')\n"
   ]
  },
  {
   "cell_type": "code",
-   "execution_count": null,
+   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
-   "def ccee(predict, label):\n",
-   "    delta = 1e-7\n",
-   "    log_pred = np.log(predict + delta)\n",
-   "\n",
-   "    return -(np.sum(np.sum(label * log_pred, axis = 1)))/label.shape[0]"
+   "num_classes = 10\n",
+   "y_train = keras.utils.np_utils.to_categorical(y_train, num_classes)\n",
+   "y_test = keras.utils.np_utils.to_categorical(y_test, num_classes)"
  ]
 },
 {
  "cell_type": "code",
-  "execution_count": 16,
+  "execution_count": 7,
  "metadata": {},
  "outputs": [
   {
-    "ename": "AttributeError",
-    "evalue": "module 'keras.utils' has no attribute 'to_categorical'",
-    "output_type": "error",
-    "traceback": [
-     "AttributeError: module 'keras.utils' has no attribute 'to_categorical' (raised at: y_train = keras.utils.to_categorical(y_train, num_classes))"
-    ]
-   }
-  ],
-  "source": [
-   "num_classes = 10\n",
-   "y_train = keras.utils.to_categorical(y_train, num_classes)\n",
-   "y_test = keras.utils.to_categorical(y_test, num_classes)"
+    "data": {
+     "text/plain": [
+      "array([0., 0., 0., 0., 0., 1., 0., 0., 0., 0.], dtype=float32)"
+     ]
+    },
  ]
 },
 {
  "cell_type": "code",
-  "execution_count": 6,
+  "execution_count": 7,
  "metadata": {},
  "outputs": [
   {
-    "ename": "ModuleNotFoundError",
-    "evalue": "No module named 'matplotlib'",
-    "output_type": "error",
-    "traceback": [
-     "ModuleNotFoundError: No module named 'matplotlib' (raised at: import matplotlib.pyplot as plt)"
-    ]
+    "output_type": "execute_result"
   }
  ],
  "source": [
-   "import random\n",
-   "import matplotlib.pyplot as plt\n",
-   "\n",
-   "predicted_result = model.predict(x_test)\n",
-   "predicted_labels = np.argmax(predicted_result, axis=1)\n",
-   "\n",
-   "test_labels = np.argmax(y_test, axis=1)\n",
-   "\n",
-   "count = 0\n",
-   "\n",
-   "plt.figure(figsize=(12,8))\n",
-   "for n in range(16):\n",
-   "    count += 1\n",
-   "    plt.subplot(4, 4, count)\n",
-   "    plt.imshow(x_test[n].reshape(28, 28), cmap='Greys', interpolation='nearest')\n",
-   "    tmp = \"Label:\" + str(test_labels[n]) + \", Prediction:\" + str(predicted_labels[n])\n",
-   "    plt.title(tmp)\n",
-   "\n",
-   "plt.tight_layout()\n",
-   "plt.show()"
+   "y_train[0]"
  ]
 },
 {
  "cell_type": "code",
-  "execution_count": 13,
+  "execution_count": 8,
  "metadata": {},
  "outputs": [
   {
    "name": "stdout",
    "output_type": "stream",
    "text": [
-    (removed output: summary of the old model "as3s6g-confscore-center", a Conv1D network on (None, 75, 3) inputs with layers c1–c10, conv1d–conv1d_4, flatten, fc3, fc4, dense, dense_1, fc5, dropouts and a reshape to (None, 6, 2); Total params: 4,635,196, all trainable)
+    (added output: "28 28" followed by the summary of the new model "image_class": conv2d, max_pooling2d, conv2d_1, max_pooling2d_1, dropout, flatten, dense (1000 units), dropout_1, dense_1 (10 units) on (None, 28, 28, 1) inputs; Total params: 3,156,098, all trainable — both summaries are shown in full in the rendered notebook below)
    ]
   }
  ],
"source": [
"def create_model( img_rows,img_cols, num_classes):\n",
" inputs = tf.keras.Input(shape=(
img_rows, img_cols
, 1))\n",
" x = tf.keras.layers.Conv2D(32, kernel_size = (5,5),
name='c1',
padding='same',\n",
"def create_model
_
( img_rows,img_cols, num_classes):\n",
" inputs = tf.keras.Input(shape=(
28, 28
, 1))
\n",
" x = tf.keras.layers.Conv2D(32, kernel_size = (5,5), padding='same',\n",
" activation='relu')(inputs)\n",
" x = tf.keras.layers.MaxPool2D(pool_size = (2,2),strides = (2,2))(x)\n",
" \n",
" x = tf.keras.layers.Conv2D(64, kernel_size = (2,2),
name='c1',
padding='same',\n",
" x = tf.keras.layers.Conv2D(64, kernel_size = (2,2),padding='same',\n",
" activation='relu')(x)\n",
" x = tf.keras.layers.MaxPool2D(pool_size = (2,2))(x)\n",
" \n",
" x = tf.keras.layers.Dropout(0.25)(x)\n",
" x = tf.keras.layers.Flatten(
name='flatten'
)(x)
\n",
" x = tf.keras.layers.Dense(1000,
name='fc3',
activation='leaky_relu')(x)\n",
" x = tf.keras.layers.Flatten()(x)\n",
" x = tf.keras.layers.Dense(1000, activation='leaky_relu')(x)\n",
" x = tf.keras.layers.Dropout(0.5)(x)\n",
" x = tf.keras.layers.Dense(num_classes, activation='softmax')(x)\n",
" model.summary()\n",
" return model\n",
" outputs = tf.keras.layers.Dense(num_classes, activation='softmax')(x)\n",
" \n",
" model = keras.Model(inputs, outputs, name='image_class')\n",
" return model\n",
"\n",
"\n",
"model = create_model(img_cols, img_rows, num_classes)\n",
"print(img_cols, img_rows)\n",
"model = create_model_(img_rows,img_cols, num_classes)\n",
"model.summary()"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"def ccee(y_true, y_pred):\n",
" delta = 1e-7\n",
" log_pred = tf.math.log(y_pred + delta)\n",
"\n",
" return -(tf.reduce_sum(tf.reduce_sum(y_true * log_pred, axis = 1)))/y_true.shape[0]"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
"source": [
"model.compile( loss = ccee,optimizer='adam', metrics= ['accuracy'] )\n",
"hist = model.fit(x_train, y_train, batch_size=10, epochs= 10, verbose=1, validation_data=(x_test, y_test))"
]
}
],
"metadata": {
...
...
%% Cell type:code id: tags:
``` python
import sys
import tensorflow as tf
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers.convolutional import Conv2D, MaxPooling2D
from keras.utils import np_utils
import numpy as np
```
%% Cell type:code id: tags:
``` python
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
```
%% Output
Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/mnist.npz
11493376/11490434 [==============================] - 2s 0us/step
11501568/11490434 [==============================] - 2s 0us/step
%% Cell type:code id: tags:
``` python
img_rows = 28
img_cols = 28
x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
x_train = x_train.astype('float32') / 255.
x_test = x_test.astype('float32') / 255.
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
```
%% Output
x_train shape: (60000, 28, 28, 1)
60000 train samples
10000 test samples
%% Cell type:code id: tags:
``` python
def ccee(predict, label):
    delta = 1e-7
    log_pred = np.log(predict + delta)

    return -(np.sum(np.sum(label * log_pred, axis = 1)))/label.shape[0]
```
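A quick way to sanity-check this NumPy version of the loss is to compare it against Keras' built-in categorical cross entropy on a small one-hot batch. The snippet below is a minimal sketch and not part of the commit; with `delta` at 1e-7 the two values should agree to several decimal places.

``` python
# Minimal sketch (not part of this commit): compare ccee with Keras' built-in loss.
import numpy as np
import tensorflow as tf

label = np.array([[0., 1., 0.], [1., 0., 0.]], dtype=np.float32)          # one-hot targets
predict = np.array([[0.1, 0.8, 0.1], [0.6, 0.3, 0.1]], dtype=np.float32)  # softmax-like outputs

print(ccee(predict, label))                                                # custom NumPy loss defined above
print(tf.keras.losses.CategoricalCrossentropy()(label, predict).numpy())  # reference value, ~0.367 for this batch
```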
%% Cell type:code id: tags:
``` python
num_classes = 10
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
y_train = keras.utils.np_utils.to_categorical(y_train, num_classes)
y_test = keras.utils.np_utils.to_categorical(y_test, num_classes)
```
%% Output
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
/Users/yongheewon/Documents/opensource/loss_function.ipynb Cell 4' in <cell line: 2>()
<a href='vscode-notebook-cell:/Users/yongheewon/Documents/opensource/loss_function.ipynb#ch0000006?line=0'>1</a> num_classes = 10
----> <a href='vscode-notebook-cell:/Users/yongheewon/Documents/opensource/loss_function.ipynb#ch0000006?line=1'>2</a> y_train = keras.utils.to_categorical(y_train, num_classes)
<a href='vscode-notebook-cell:/Users/yongheewon/Documents/opensource/loss_function.ipynb#ch0000006?line=2'>3</a> y_test = keras.utils.to_categorical(y_test, num_classes)
AttributeError: module 'keras.utils' has no attribute 'to_categorical'
%% Cell type:code id: tags:
``` python
import random
import matplotlib.pyplot as plt

predicted_result = model.predict(x_test)
predicted_labels = np.argmax(predicted_result, axis=1)

test_labels = np.argmax(y_test, axis=1)

count = 0

plt.figure(figsize=(12,8))
for n in range(16):
    count += 1
    plt.subplot(4, 4, count)
    plt.imshow(x_test[n].reshape(28, 28), cmap='Greys', interpolation='nearest')
    tmp = "Label:" + str(test_labels[n]) + ", Prediction:" + str(predicted_labels[n])
    plt.title(tmp)

plt.tight_layout()
plt.show()
y_train[0]
```
%% Output
---------------------------------------------------------------------------
ModuleNotFoundError Traceback (most recent call last)
/Users/yongheewon/Documents/opensource/loss_function.ipynb Cell 4' in <cell line: 2>()
<a href='vscode-notebook-cell:/Users/yongheewon/Documents/opensource/loss_function.ipynb#ch0000003?line=0'>1</a> import random
----> <a href='vscode-notebook-cell:/Users/yongheewon/Documents/opensource/loss_function.ipynb#ch0000003?line=1'>2</a> import matplotlib.pyplot as plt
<a href='vscode-notebook-cell:/Users/yongheewon/Documents/opensource/loss_function.ipynb#ch0000003?line=3'>4</a> predicted_result = model.predict(x_test)
<a href='vscode-notebook-cell:/Users/yongheewon/Documents/opensource/loss_function.ipynb#ch0000003?line=4'>5</a> predicted_labels = np.argmax(predicted_result, axis=1)
ModuleNotFoundError: No module named 'matplotlib'
array([0., 0., 0., 0., 0., 1., 0., 0., 0., 0.], dtype=float32)
%% Cell type:code id: tags:
``` python
def create_model( img_rows,img_cols, num_classes):
    inputs = tf.keras.Input(shape=(img_rows, img_cols, 1))
    x = tf.keras.layers.Conv2D(32, kernel_size = (5,5), name='c1', padding='same',
def create_model_( img_rows,img_cols, num_classes):
    inputs = tf.keras.Input(shape=(28, 28, 1))
    x = tf.keras.layers.Conv2D(32, kernel_size = (5,5), padding='same',
                               activation='relu')(inputs)
    x = tf.keras.layers.MaxPool2D(pool_size = (2,2),strides = (2,2))(x)

    x = tf.keras.layers.Conv2D(64, kernel_size = (2,2), name='c1', padding='same',
    x = tf.keras.layers.Conv2D(64, kernel_size = (2,2),padding='same',
                               activation='relu')(x)
    x = tf.keras.layers.MaxPool2D(pool_size = (2,2))(x)

    x = tf.keras.layers.Dropout(0.25)(x)
    x = tf.keras.layers.Flatten(name='flatten')(x)
    x = tf.keras.layers.Dense(1000, name='fc3', activation='leaky_relu')(x)
    x = tf.keras.layers.Flatten()(x)
    x = tf.keras.layers.Dense(1000, activation='leaky_relu')(x)
    x = tf.keras.layers.Dropout(0.5)(x)
    x = tf.keras.layers.Dense(num_classes, activation='softmax')(x)
    model.summary()
    return model
    outputs = tf.keras.layers.Dense(num_classes, activation='softmax')(x)

    model = keras.Model(inputs, outputs, name='image_class')
    return model


model = create_model(img_cols, img_rows, num_classes)
print(img_cols, img_rows)
model = create_model_(img_rows,img_cols, num_classes)
model.summary()
```
%% Output
Model: "as3s6g-confscore-center"
28 28
Model: "image_class"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_1 (InputLayer) [(None, 75, 3)] 0
c1 (Conv1D) (None, 75, 64) 1024
c2 (Conv1D) (None, 75, 64) 20544
conv1d (Conv1D) (None, 75, 64) 20544
c3 (Conv1D) (None, 75, 128) 41088
c4 (Conv1D) (None, 75, 128) 82048
c6 (Conv1D) (None, 75, 256) 164096
c7 (Conv1D) (None, 75, 256) 327936
c9 (Conv1D) (None, 75, 512) 655872
input_1 (InputLayer) [(None, 28, 28, 1)] 0
c
10
(Conv
1
D)
(None,
75, 51
2)
13112
32
c
onv2d
(Conv
2
D) (None,
28, 28, 3
2)
8
32
conv1d_1 (Conv1D) (None, 75, 256) 655616
max_pooling2d (MaxPooling2D (None, 14, 14, 32) 0
)
conv
1
d_
2
(Conv
1
D) (None,
75, 256
)
65792
conv
2
d_
1
(Conv
2
D) (None,
14, 14, 64
)
8256
conv1d_3 (Conv1D) (None, 75, 128) 32896
max_pooling2d_1 (MaxPooling (None, 7, 7, 64) 0
2D)
conv1d_4 (Conv1D
) (None, 7
5
,
128
)
16512
dropout (Dropout
) (None, 7,
7, 64
)
0
flatten (Flatten) (None,
9600
) 0
flatten (Flatten) (None,
3136
) 0
fc3
(Dense)
(None, 1
28
)
1228928
dense
(Dense) (None, 1
000
)
3137000
dropout (Dropout)
(None, 1
28)
0
dropout
_1
(Dropout) (None, 1
000)
0
fc4 (Dense) (None, 64) 8256
dropout_1 (Dropout) (None, 64) 0
dense (Dense) (None, 32) 2080
dropout_2 (Dropout) (None, 32) 0
dense_1 (Dense) (None, 16) 528
dropout_3 (Dropout) (None, 16) 0
fc5 (Dense) (None, 12) 204
reshape (Reshape) (None, 6, 2) 0
dense_1 (Dense) (None, 10) 10010
=================================================================
Total params: 4,635,196
Trainable params: 4,635,196
Non-trainable params: 0
_________________________________________________________________
Model: "as3s6g-confscore-center"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_1 (InputLayer) [(None, 75, 3)] 0
c1 (Conv1D) (None, 75, 64) 1024
c2 (Conv1D) (None, 75, 64) 20544
conv1d (Conv1D) (None, 75, 64) 20544
c3 (Conv1D) (None, 75, 128) 41088
c4 (Conv1D) (None, 75, 128) 82048
c6 (Conv1D) (None, 75, 256) 164096
c7 (Conv1D) (None, 75, 256) 327936
c9 (Conv1D) (None, 75, 512) 655872
c10 (Conv1D) (None, 75, 512) 1311232
conv1d_1 (Conv1D) (None, 75, 256) 655616
conv1d_2 (Conv1D) (None, 75, 256) 65792
conv1d_3 (Conv1D) (None, 75, 128) 32896
conv1d_4 (Conv1D) (None, 75, 128) 16512
flatten (Flatten) (None, 9600) 0
fc3 (Dense) (None, 128) 1228928
dropout (Dropout) (None, 128) 0
fc4 (Dense) (None, 64) 8256
dropout_1 (Dropout) (None, 64) 0
dense (Dense) (None, 32) 2080
dropout_2 (Dropout) (None, 32) 0
dense_1 (Dense) (None, 16) 528
dropout_3 (Dropout) (None, 16) 0
fc5 (Dense) (None, 12) 204
reshape (Reshape) (None, 6, 2) 0
=================================================================
Total params: 4,635,196
Trainable params: 4,635,196
Total params: 3,156,098
Trainable params: 3,156,098
Non-trainable params: 0
_________________________________________________________________
%% Cell type:code id: tags:
``` python
def ccee(y_true, y_pred):
    delta = 1e-7
    log_pred = tf.math.log(y_pred + delta)

    return -(tf.reduce_sum(tf.reduce_sum(y_true * log_pred, axis = 1)))/y_true.shape[0]
```
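This TensorFlow rewrite of `ccee` keeps the same structure as the NumPy version and is what gets passed to `model.compile` below. One hedged side note: dividing by `y_true.shape[0]` relies on the static batch dimension being known; an equivalent formulation that avoids that assumption lets `tf.reduce_mean` do the batch averaging. A minimal alternative sketch, not the commit's code:

``` python
# Alternative sketch (not part of this commit): same loss, batch-averaged with
# tf.reduce_mean so it does not depend on a statically known batch dimension.
import tensorflow as tf

def ccee_mean(y_true, y_pred):
    delta = 1e-7                                              # guards against log(0)
    log_pred = tf.math.log(y_pred + delta)
    per_sample = -tf.reduce_sum(y_true * log_pred, axis=1)    # cross entropy per sample
    return tf.reduce_mean(per_sample)                         # average over the batch
```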
%% Cell type:code id: tags:
``` python
model.compile( loss = ccee,optimizer='adam', metrics= ['accuracy'] )
hist = model.fit(x_train, y_train, batch_size=10, epochs= 10, verbose=1, validation_data=(x_test, y_test))
```
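With `batch_size=10` over the 60,000 MNIST training images, each epoch runs 6,000 steps, which matches the `6000/6000` progress lines in the training output of this notebook. After fitting, the compiled metrics can be read back from `hist.history` or via a final evaluation; a minimal sketch, not part of the commit:

``` python
# Minimal sketch (not part of this commit): inspect training history and run a
# final evaluation with the same custom loss and accuracy metric.
print(hist.history.keys())   # dict_keys(['loss', 'accuracy', 'val_loss', 'val_accuracy'])
test_loss, test_acc = model.evaluate(x_test, y_test, verbose=0)
print('test accuracy:', test_acc)
```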
...
...
loss_function.ipynb (+35 −25)
...
...
@@ -18,10 +18,10 @@
},
{
"cell_type": "markdown",
"source": [],
"metadata": {
"collapsed": false
}
},
"source": []
},
{
"cell_type": "code",
...
...
@@ -159,14 +159,14 @@
{
"cell_type": "code",
"execution_count": null,
"outputs": [],
"source": [],
"metadata": {
"collapsed": false,
"pycharm": {
"name": "#%%\n"
}
}
},
"outputs": [],
"source": []
},
{
"cell_type": "code",
...
...
@@ -177,40 +177,39 @@
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch 1/10\n",
" 1/6000 [..............................] - ETA: 17:42 - loss: 2.2814 - accuracy: 0.2000"
"Epoch 1/10\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"2022-06-26 1
1:32:02.208949
: W tensorflow/core/platform/profile_utils/cpu_utils.cc:128] Failed to get CPU frequency: 0 Hz\n"
"2022-06-26 1
2:24:22.787706
: W tensorflow/core/platform/profile_utils/cpu_utils.cc:128] Failed to get CPU frequency: 0 Hz\n"
]
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"6000/6000 [==============================] -
9
6s 1
6
ms/step - loss: 0.12
36
- accuracy: 0.96
16
- val_loss: 0.0
493
- val_accuracy: 0.98
53
\n",
"6000/6000 [==============================] - 6
8
s 1
1
ms/step - loss: 0.12
59
- accuracy: 0.96
08
- val_loss: 0.0
342
- val_accuracy: 0.98
88
\n",
"Epoch 2/10\n",
"6000/6000 [==============================] -
150s 25
ms/step - loss: 0.06
80
- accuracy: 0.9
796
- val_loss: 0.0
501
- val_accuracy: 0.98
4
5\n",
"6000/6000 [==============================] -
77s 13
ms/step - loss: 0.06
57
- accuracy: 0.9
800
- val_loss: 0.0
442
- val_accuracy: 0.98
6
5\n",
"Epoch 3/10\n",
"6000/6000 [==============================] -
186s 31
ms/step - loss: 0.055
7
- accuracy: 0.98
35
- val_loss: 0.0
439
- val_accuracy: 0.9
856
\n",
"6000/6000 [==============================] -
81s 13
ms/step - loss: 0.055
4
- accuracy: 0.98
27
- val_loss: 0.0
312
- val_accuracy: 0.9
902
\n",
"Epoch 4/10\n",
"6000/6000 [==============================] -
206s 34
ms/step - loss: 0.05
17
- accuracy: 0.98
48
- val_loss: 0.03
37
- val_accuracy: 0.98
90
\n",
"6000/6000 [==============================] -
91s 15
ms/step - loss: 0.05
03
- accuracy: 0.98
50
- val_loss: 0.03
85
- val_accuracy: 0.98
85
\n",
"Epoch 5/10\n",
"6000/6000 [==============================] - 1
8
0s
30
ms/step - loss: 0.048
0
- accuracy: 0.986
1
- val_loss: 0.0
295
- val_accuracy: 0.9
912
\n",
"6000/6000 [==============================] - 1
1
0s
18
ms/step - loss: 0.048
1
- accuracy: 0.986
2
- val_loss: 0.0
359
- val_accuracy: 0.9
887
\n",
"Epoch 6/10\n",
"6000/6000 [==============================] - 1
59s 26
ms/step - loss: 0.04
36
- accuracy: 0.986
9
- val_loss: 0.0
415
- val_accuracy: 0.98
80
\n",
"6000/6000 [==============================] - 1
10s 18
ms/step - loss: 0.04
81
- accuracy: 0.986
0
- val_loss: 0.0
637
- val_accuracy: 0.98
29
\n",
"Epoch 7/10\n",
"6000/6000 [==============================] - 1
62s 27
ms/step - loss: 0.04
92
- accuracy: 0.98
6
3 - val_loss: 0.04
33
- val_accuracy: 0.9
876
\n",
"6000/6000 [==============================] - 1
08s 18
ms/step - loss: 0.04
44
- accuracy: 0.98
7
3 - val_loss: 0.04
17
- val_accuracy: 0.9
903
\n",
"Epoch 8/10\n",
"6000/6000 [==============================] - 1
39s 23
ms/step - loss: 0.04
54
- accuracy: 0.9875 - val_loss: 0.03
7
2 - val_accuracy: 0.990
1
\n",
"6000/6000 [==============================] - 1
16s 19
ms/step - loss: 0.04
32
- accuracy: 0.9875 - val_loss: 0.032
5
- val_accuracy: 0.990
6
\n",
"Epoch 9/10\n",
"6000/6000 [==============================] - 1
38s 23
ms/step - loss: 0.04
24
- accuracy: 0.988
3
- val_loss: 0.03
14
- val_accuracy: 0.9
905
\n",
"6000/6000 [==============================] - 1
03s 17
ms/step - loss: 0.04
30
- accuracy: 0.988
6
- val_loss: 0.03
29
- val_accuracy: 0.9
898
\n",
"Epoch 10/10\n",
"6000/6000 [==============================] -
133s 22
ms/step - loss: 0.04
21
- accuracy: 0.988
5
- val_loss: 0.0
401
- val_accuracy: 0.9
893
\n"
"6000/6000 [==============================] -
97s 16
ms/step - loss: 0.04
48
- accuracy: 0.988
4
- val_loss: 0.0
318
- val_accuracy: 0.9
902
\n"
]
}
],
...
...
@@ -221,22 +220,33 @@
},
{
"cell_type": "code",
"execution_count":
8
,
"execution_count":
13
,
"metadata": {},
"outputs": [
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAY4AAAEWCAYAAABxMXBSAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8qNh9FAAAACXBIWXMAAAsTAAALEwEAmpwYAAA5fUlEQVR4nO3deXxU9bn48c+TyU5CNvY1AUHEjSXgAm21at2qiFtxL2Kttdal3ttae3+39t72lrZ6q22t1gWrdcWdWnevG0EREFQELAESCGsSEkL2ZPL8/viewBACTCAnM8k879drXpmzznOGcJ6c7yqqijHGGBOuuEgHYIwxpnuxxGGMMaZDLHEYY4zpEEscxhhjOsQShzHGmA6xxGGMMaZDLHEYcwAi8jcR+VWY+xaJyKl+x2RMJFniMMYY0yGWOIyJESISH+kYTM9gicP0CF4R0b+LyOciUiMiD4tIfxF5TUR2isjbIpIVsv+5IvKliFSKyHsickTItvEi8ql33DNAcpvP+raILPOOXSAix4QZ49kislREqkRkg4jc0Wb7VO98ld7273rrU0TkLhEpFpEdIjLfW3eSiJS08z2c6r2/Q0SeE5HHRaQK+K6ITBaRj7zP2CwifxaRxJDjjxSRt0Rku4hsFZHbRWSAiNSKSE7IfhNFpFREEsK5dtOzWOIwPckFwGnAaOAc4DXgdqAP7nf9RgARGQ08BdwM9AVeBf4hIoneTfQl4O9ANvCsd168YycAc4DvAznAX4F5IpIURnw1wJVAJnA28AMROc877zAv3j95MY0DlnnH3QlMBE70YvoJ0BLmdzINeM77zCeAIHAL7js5ATgFuN6LIR14G3gdGAQcBryjqluA94CLQ857OfC0qjaFGYfpQSxxmJ7kT6q6VVU3Ah8CC1V1qao2AC8C4739vgP8U1Xf8m58dwIpuBvz8UACcLeqNqnqc8CikM/4HvBXVV2oqkFVfRRo8I7bL1V9T1W/UNUWVf0cl7y+4W2+DHhbVZ/yPrdcVZeJSBxwNXCTqm70PnOBd03h+EhVX/I+s05Vl6jqx6rarKpFuMTXGsO3gS2qepeq1qvqTlVd6G17FJcsEJEAcAkuuZoYZInD9CRbQ97XtbOc5r0fBBS3blDVFmADMNjbtlH3HP2zOOT9cOBWr6inUkQqgaHecfslIseJyLteEc8O4DrcX/5451jTzmF9cEVl7W0Lx4Y2MYwWkVdEZItXfPU/YcQA8DIwVkRG4J7qdqjqJwcZk+nmLHGYWLQJlwAAEBHB3TQ3ApuBwd66VsNC3m8Afq2qmSGvVFV9KozPfRKYBwxV1QzgfqD1czYAI9s5pgyo38e2GiA15DoCuGKuUG2Hv74PWAWMUtXeuKK8A8WAqtYDc3FPRldgTxsxzRKHiUVzgbNF5BSvcvdWXHHTAuAjoBm4UUTiReR8YHLIsQ8C13lPDyIivbxK7/QwPjcd2K6q9SIyGbg0ZNsTwKkicrH3uTkiMs57GpoD/K+IDBKRgIic4NWp/AtI9j4/AfgP4EB1LelAFVAtImOAH4RsewUYICI3i0iSiKSLyHEh2x8DvgucCzwexvWaHsoSh4k5qvoVrrz+T7i/6M8BzlHVRlVtBM7H3SArcPUhL4QcuxhXz/Fnb3uht284rgf+S0R2Av+JS2Ct510PnIVLYttxFePHepv/DfgCV9eyHfgtEKeqO7xzPoR7WqoB9mhl1Y5/wyWsnbgk+ExIDDtxxVDnAFuA1cDJIdsLcJXyn3r1IyZGiU3kZIwJl4j8H/Ckqj4U6VhM5FjiMMaERUQmAW/h6mh2RjoeEzlWVGWMOSAReRTXx+NmSxrGnjiMMcZ0iD1xGGOM6ZCYGPSsT58+mpubG+kwjDGmW1myZEmZqrbtGxQbiSM3N5fFixdHOgxjjOlWRKS4vfVWVGWMMaZDLHEYY4zpEEscxhhjOiQm6jja09TURElJCfX19ZEOxVfJyckMGTKEhASbb8cY0zliNnGUlJSQnp5Obm4uew6E2nOoKuXl5ZSUlJCXlxfpcIwxPUTMFlXV19eTk5PTY5MGgIiQk5PT45+qjDFdK2YTB9Cjk0arWLhGY0zXitmiKmNMNxRshoYqqN+x+2d9lfe+Chp2QnIG9BkFfUZD70Fgfzx1OkscEVJZWcmTTz7J9ddf36HjzjrrLJ588kkyMzP9CcwYvwSb3I29vrLNzT705459rPd+NtV27DMT03YnkV0/R0P2CIg/0JxXZl98TRwicgZwDxAAHlLV2W22Z+FmNxuJmx7zalVd7m27CTdhjgAPqurd3vps3OQzuUARcLGqVvh5HX6orKzkL3/5y16JIxgMEggE9nncq6++6ndoxnTcxk9h+fOHftNPSIWk3pDce/fPjCEhyxl7b9+1LgOS0qF2O5T9C8q+grLV7n3xAvj8md2fI3GQlRuSUA7f/T4127evqafwLXF48x/fi5tRrARYJCLzVHVFyG63A8tUdbo3jeW9wCkichQuaUwGGoHXReSfqroauA14R1Vni8ht3vJP/boOv9x2222sWbOGcePGkZCQQFpaGgMHDmTZsmWsWLGC8847jw0bNlBfX89NN93EtddeC+wePqW6upozzzyTqVOnsmDBAgYPHszLL79MSkqKPwGXr4E3bnd/pR05HUZ9CxJ7+fNZpnvZuQUePx8aayAlu/2bfnIGJGXsuW2Pn95NP9AJzcbT+7tX3tf2XN9YA+WFLpmUfuUll9Ww5l0INuzeL7XP3k8ofUdDxlCI2/cfdV2mpcU9tdWWu1dN2e73teUucYYun3cfDD+hU0Pw84ljMlCoqmsBRORpYBoQmjjGAr8BUNVVIpIrIv2BI4CPVbXWO/Z9YDrwO+8cJ3nHPwq8xyEmjl/+40tWbKo6lFPsZeyg3vzinCP3uX327NksX76cZcuW8d5773H22WezfPnyXc1m58yZQ3Z2NnV1dUyaNIkLLriAnJycPc6xevVqnnrqKR588EEuvvhinn/+eS6//PJOvQ5U4bOn4Z+3QiAeAkmw4mWIT4HR34Kx58Ho0y2JxCpVePkGaKqHHyxwN9toldgLBh7rXqFaglC5fvfTSetr1SvuxtsqPhlyDtszofQZ5dYd7O+/qiu+qy3b+4a/r0RQVwHa0v754pNd4kvNhl59IDsPElMPLrb98DNxDAY2hCyXAMe12ecz3PzO80VkMjAcGAIsB34tIjlAHW4u5tZRCvur6mYAVd0sIv3a+3ARuRa4FmDYsGGdckF+mjx58h59Lf74xz/y4osvArBhwwZWr169V+LIy8tj3LhxAEycOJGioqLODaq+Cv75Y/jiWRg+Bc5/ANIHusf+FS/BinmWRGLdkkeg8C048/fRnTT2Jy7gbrDZee73OFRNOZSHJpTVsGmZ+70PvXlnDA0p8vKSSUtzeMmgpWkfccVDas7uV7+xey6n5rgEEbrsQ5Joj5+Jo72mDG1njZoN3CMiy4AvgKVAs6quFJHf4qaprMYlmOaOfLiqPgA8AJCfn7/f2ar292TQVXr12n2zfe+993j77bf56
KOPSE1N5aSTTmq3L0ZS0u7KvUAgQF1dXecFVLIYnp/l/hI7+efwtVt3P6bnfc29zvwdrP8IvnzRkkgsKl8Db/wcRpwEk66JdDT+6JXjXsOO33N9Uz1sX7s7mbQmlk8fg6aadk4kkJLlbu6tTwJDJraTCEKSQVLvqG0R5mfiKAGGhiwPATaF7qCqVcBMAHEdDtZ5L1T1YeBhb9v/eOcD2CoiA72njYHANh+vwTfp6ens3Nn+DJw7duwgKyuL1NRUVq1axccff9x1gbW0QMHd8O6v3dPFzNf2/k/TKi4AuVPda19JZNRprk7EkkjP0hKEl34AcQkw7S8QF2NdwhKSof9Y9wqlClUbXVINJO5OBimZ0VE/0kn8TByLgFEikgdsBGYAl4buICKZQK2qNgLXAB94yQQR6aeq20RkGK44q7V2Zx5wFe5p5SrgZR+vwTc5OTlMmTKFo446ipSUFPr3779r2xlnnMH999/PMcccw+GHH87xx+/jxt3ZqjbDi9+Hde+7J4Zz7nG/8OHYXxJZOS8kiZwHo06HpDQfL8T4ruAe2LAQzn8QMgZHOproIeIaBGQMiXQkvvJ1znEROQu4G9ccd46q/lpErgNQ1ftF5ATgMSCIqzSf1dq0VkQ+BHKAJuDHqvqOtz4HmAsMA9YDF6nq9v3FkZ+fr20nclq5ciVHHHFEZ11qVAvrWr96HV6+Hprq4MzfwvgrOucxuSXoJZGX3FNIzTZLIt3d5s/hwW/CmLPhor9FbXGKOXQiskRV8/da72fiiBaWOPZzrU318NZ/wid/hf5Hw4VzXNNDP1gS6f6a6uHBk12l7vUfW5+HHm5ficN6jsey0q/guath63I47gdw6h2u7NYvexRn/XZ3ElkZWpx1qtdPxJJIVHr317BtBVz2nCWNGGaJY3/qKl3RTa8+ndMxKVqowqePwmu3ueZ7l851ldddab9J5B+uPfqo07zWWWdYEokGRQWw4E8wcab7tzExyxLH/jTWuOKU6q1eh5p+/v5F3hXqKuAfN7miohEnwfS/QvqAyMa0VxL52FWsH2oSUXXjIzXVuj8Adv2sa2fd/rbtY11CClzwIAwa7+vXExUadsJL17lhOr71q0hHE7UamoMs31jF5yWVtCikJ8XTKymeXkkB0pO994nxu94nBLpnazRLHPuTMdg1pavZtrvDTlJvSOvnBk/rbpWC6z+G56+BnZvh1F/CiTdGXzPKuADkTnGvfSWRkd90Q1Ts68beGPJegx2PIZDokkJCapufKa6VWeu6te/BExfDNW9D1vDO/iaiy+s/gx0lMPN1e/oLsa2qniXFFXy6voIlxRUs31hFY3AfvbrbkRgftyu5pHmvXkkB0pITSEsK0CsxnrTk0G3e+2SXgHa9TwqQFN91zX0tcRxIQjJkDnN9GmrL3Lgw5YXu5tGrn7uRSJTdfNtShfd+C+/PhszhcPWbrvNRtGsviax4CVa/6Xrtht7YU7LcENrt3ez3WrefbfEpbmiVcGxbBXO+BU9cCFe/0XPL/L96DZb+Hab+GIa1HfwhdjQHW1i1ZSdLiit2JYuSCtfpNjE+jmMGZzBzSi4ThmcxfmgmSQkBqhuaqWloZme9+1nT0MzOhr3fV9c3U90QpKahmbLqRorLa3dtq20M74+fhIDsmVy89z8+bTTHDs3s1O/CEke4AgkuefTqD3XboXobVBZD1SZI6+vGh/Gxg09aWhrV1dUdP7C5EWpK4b3/gWO+A2fd6QaW625Ckwi/j3Q0Tr8xMONJ+Pt0ePpSuOKl7l+U2VZNGcz7kWtxd9LPIh1Nl6qoaWTphopdieKzDTuoa3I38f69k8gfns13T8xl4vAsjhyUQWL83n9AZqQcet1osEWpaQxJNvXN1DQEqW5o2pVsqr3X7iTkXpW1jQR9aDlriaOj4uJcZXlqjhsqunqbSx47t3rDCfSF+MRIR+nUVbohQ4KNri7j2BmRjqjnyZ3qRh99fparA7hgTvQV/x0sVVcfVr8Drnw5en6vfdDSohSWVrsnieIKlqyvYG2pGzokPk4YO6g335k0lInDs5gwPItBGcldNrtmIE7onZxA7+ToaaBjieNgibhy9uQMV6Zevc3VhdRsg+Qsrx5k3wOO/fSnP2X48OG75uO44447EBE++OADKioqaGpq4le/+hXTpk3reGwtQTfsQW25K4JJ6w9HWSsY3xx9ofu+3/pP6D0YTv91pCPqHJ897UaIPe2/oH/kx3PrTDvrm/hsww73NLG+gqXrK9hZ74bDy+6VyIRhWVw4cQgTh2VxzJBMUhJ7znAhncESB7hmqVu+OPTzaIsb6TLYBDkj3aN9Wr92ByubMWMGN998867EMXfuXF5//XVuueUWevfuTVlZGccffzznnntux/6yaaqDiiJornefnT7QTWhj/HXija7y+KM/u5FSj78u0hEdmsoN8NpPYNiJcMINkY7mkKgqxeW1uyqwlxRX8NXWnai6/5aH90/nnGMHMWFYFhOHZ5Gbk9plTxPdlSWOziRxbr6KQKJrddXc4EbQjE/yKtKzdxVjjB8/nm3btrFp0yZKS0vJyspi4MCB3HLLLXzwwQfExcWxceNGtm7dyoABYTSXVXWV9zs2esNEj+yedRndlQicMdsVW75+m6uoH3tupKM6OC0tbgBDbYHp93W7wfnqm4J8XrJjVwX2p8UVlNc0Aq557LhhmZxx1AAmDs/i2KGZUVUE1F1Y4gA4c/aB9zkY2uLqGaq3wY4Nrhlsrz6Q2hcC8Vx44YU899xzbNmyhRkzZvDEE09QWlrKkiVLSEhIIDc3t93h1PcSbHZ1GQ073NNN5rCe1WGxu4gLuEH/HjsXXvieKyLsjq2QFt4PRR/CuX9y/TZ8pKo0NLdQ2xiktrGZusYgtY1B6pqCu97XNjZT19T6PkhdyPKu/RuD1DY1U9sQZP32WppbXIXwiD69OHlMv11PE4f1SyMQZ08Th8oSh58kzjXRTMmCxmqXQHZu2VWRPuOC6Xzv+hsoKyvj/fffZ+7cufTr14+EhATeffddiouLD/wZDTuhothNGtN7sKuct8fsyElMhUuehodPg6dmwKy3oM9hkY4qfNtWwdt3wOgz3UCXYaisbWRRUQXF5TXeDdzdyHe/bw656bsbfOj2jjb6SU6IIzUxnpSEAKmJ7pWSGKBfejIp2QFOP2oAE4e5SuzsXj23Qj+SLHF0BRE3n3JSuquDqCmF2nKO7CfsrCxn8MABDBwwgMsuu4xzzjmH/Px8xo0bx5gxY/Z9Tm1xSah6qyse6zO6y2b/MgfQq48by+nh0+CJC2DW267JdrQLNrlh9ZPS4Nw/7vMPkG1V9XxStJ1P1rnXqi17zisTHyekJLbe1Hff4NOT4+mXnuTd6OP3uOmnJrh9k3e999Z7+7WeLzk+QJw9MUScJY6ulpCyu0NhTRlfvPOs691c9i/6pPXjowUL2v0Pu0cfjuYGVwHeVOuaAPce3O3KoXu8nJFuDLC/fRuevBi++0r0T2T1we9h8zL4zuOuYQWuKKmkoo6F67bzybpyPlm3naLyWgBSEwNM
HJ7F2UcPZHJeNocPSCc1Mb7d/gymZ7HEESmBBOg90P0HrdsO1aUuGQQSXXFTak77yaB2u2u9A678OSWrK6M2HTEkHy58GJ65HJ6b5W7I4fZK72olS+CDO9FjZ1CYfRILPy5mkfdUsXmHq2fLSElgUm42lx03nMl52Rw5qDfx3XSsJXNoovS3OIbEBbxE0cd1tKrZ5voE7Nzi5jpO9ToUtgRdwqjbDgm93NhI8UkHPr+JrDFnuxkRX/0317z17Luiqg4q2KKs2rCVwc/MJBjIYfoXZ7J+4QcA9EtPYnJeNsflZTMpL5vR/dKtmMgAMZ44VDV62muLuHGvUjLdqLzV27xXqbeuFoINkDbAjWYbZtyxMFFX1Jv8PdeqruAeN6Xo134csVAamoN8UbKDheu2s6hoO0uKKrg1+BBHxhdzU9J/M/nwPG7IzWZyXjbDrT+D2YeYTRzJycmUl5eTk5MTff85EntBdp6ry/Aq0pEA5BzmKtjDpKqUl5eTnNzDxk/qjk65wz0xvvNL10HwmIu65GNrG5tZur5yVx3F0vWVNDS70VtH9UvjlpEb+e7aN6kefy33TLuxS2Iy3V/MJo4hQ4ZQUlJCaWlppEM5MPX6ZFSUdPjQ5ORkhgwZ0skBmar6JlZvrSZOICk+QGJ8HEm7Xm45MT5ud5+BuDg3ptXOra5zXXp/yPt6p8e1o7aJxcWubmLhuu0s37iD5hYlTuDIQRlcfryrn5iUm012XA385TroczhpZ/1Xp8dieq6YTRwJCQnk5eVFOgzTDagqGyvrWFJcweKiChYVbd81ZMWBxMcJSV4SSYoPkB24jr9QTJ/HZvAfWb9na/LIXUknMSTpJIW89lqfEEdiILBrW1V9E4uLKli4bjurtlShComBOI4dmsH3vzGCSbnZTByeRXrbHtLPX+/q1C550rX2MyZMEgtl4Pn5+bp48eJIh2G6idZ5FxYXbWexN7ZRa8uiXokBJgx3vZCPHpxBnAgNzUEamltoaG6hcY+fwXaXU+s285OSGwgi3J59N1s0a49j3Xt3zsZgS1gJKiXBNY2dnOfqJ8YNzSQ5YT9NtJe/AM/NhJN/Dt/4SSd9c6anEZElqprfdn3MPnEY06qmwdUDLC7ezuIiN1JqjTd5zsCMZCYOz9r1V/uYAemd0AR1PGx+CR45k7/GzYaZr+1zXDFVpSmoNAZbaGgKej9bQn4GSQwEGDMwPfxpSHdugX/+GAZPdJMzGdNBljhMzNmyo961KCquYHHxdlZu3kmwRRGBMQN6c/6EIeTnZpGfm83gTJ+KcAYeAxc/5joHzr0SLnu23fHFRITEeCExPo60pE7476oKL98ATfUw/YHo7Vdiopr91pgeLdii/GvrThYXV7iip6IKNla66T5TEgKMG5rJD08aycTcbMYP6+KRUg87Bc75I7x8Pcy7Ec77i/99PJY8AoVvuZkgu9MYWiaq+Jo4ROQM4B4gADykqrPbbM8C5gAjgXrgalVd7m27BbgGUOALYKaq1ovIHcD3gNbmULer6qt+XofpPmobm1m2oZIlRRUs9obVbp2gp296EpNys7h6ah6TcrM4YmDv8It3/DL+MtfH473fuD4e3/y5f59Vvgbe+DmMOBnyZ/n3OabH8y1xiEgAuBc4DSgBFonIPFVdEbLb7cAyVZ0uImO8/U8RkcHAjcBYVa0TkbnADOBv3nF/UNU7/YrddB/bdtbvShKLi7bz5aaqXUNqj+6fxrePGcSk3Czyh2czNDsl+vrsAHzjpy55fPA7lzwmXtX5n9EShBevc8Vh0+7tOdPbmojw84ljMlCoqmsBRORpYBoQmjjGAr8BUNVVIpIrIv1DYksRkSYgFdjkY6ymm6htbOaVzzfz8dpylhRXUOwNuJcUH8exQzO59uuu+emEYVlkpHaTOUlE4Nt3Q9VmeOUWNwnUqE6e6rfgbij5BM5/CDIGd+65TczxM3EMBjaELJcAbWe1+Qw4H5gvIpOB4cAQVV0iIncC64E64E1VfTPkuBtE5EpgMXCrqla0/XARuRa4FmDYsGGddEkmUrZW1fO3BUU8uXA9O+qayOmVyMThWVx+3HAm5mZx1KCM7j0qayABLn4UHjkL5l4FM1+FQeM659ybP4d3fwNHTnfzoxtziPxMHO2VCbRtkT4buEdEluHqMZYCzV7dxzQgD6gEnhWRy1X1ceA+4L+9c/03cBdw9V4fpPoA8AC4fhydcD0mAr7ctIOHP1zHPz7fRLBFOf3IAVzztTwmDMuKzmKnQ5GU7lpXPXQqPHERXPO2G8zyUDTVuzk2UnPg7P+NqgEWTfflZ+IoAYaGLA+hTXGTqlYBMwHE3QXWea/TgXWqWuptewE4EXhcVbe2Hi8iDwKv+HgNJgJaWpT3/1XKgx+uZcGaclITA1x23HCunpLHsJwePllV+gA3CdScb8ETF8LVb7hZJA/Wu7+CbSvcOQ/lPMaE8DNxLAJGiUgesBFXuX1p6A4ikgnUqmojrgXVB6paJSLrgeNFJBVXVHUKrlgKERmoqpu9U0wHlvt4DaYL1TcFeXHpRh6ev47CbdUM6J3MbWeO4ZLJw8hI6Sb1FZ2h3xiY8RT8/Tx4+jK44kVIOIiBKosKYMGfIf/qzq8zMTHNt8Shqs0icgPwBq457hxV/VJErvO23w8cATwmIkFcpfksb9tCEXkO+BRoxhVhPeCd+nciMg5XVFUEfN+vazBdo6y6gb9/VMzjHxdTXtPIkYN6c/d3xnH2MQMj31w2UnKnuEERn58FL10HF8zpWEuo+ip3XFYunPbfvoVpYpONVWUiZvXWnTw8fx0vLN1IY3MLpx7Rj1lTR3D8iOyeV39xsArugbf+E064AU7/dfjHvXwDLHsCZr4Ow9q2STEmPDZWlYkKqsqCNeU8+OFa3vuqlKT4OC6cOIRZU/MY2Tct0uFFnxNvdPN4fPRnN1f9cWE8YK96FZb+3Y1DZUnD+MASh+kSjc0t/OOzTTw0fx0rN1fRJy2RH582msuPH052r8RIhxe9ROCM2VC1CV77qevjccQ5+96/pgz+cSMMOBpO+lnXxWliiiUO46vK2kaeWLieRxcUsW1nA6P7p/G7C47h3HGD9j/st9ktLgDnPwiPnQvPXwNXzmv/SUIV/nGTm7v+ynlurnpjfGCJw/iiqKyGOQXreHZxCXVNQb42qg+/v+hYvj6qj9VfHIzEVLjkaXj4NHhqBsx6a+9BCj97Cla94irD+4+NTJwmJljiMJ1GVVlUVMFDH67lrZVbSYiLY9q4Qcz6Wh5jBrQ/34TpgF59XH+Mh0+DJy6AWW9DWl+3rXK9K8oaPgVO+GFk4zQ9niUOc8iagy28unwLD3+4ls9KdpCZmsANJx/GFScMp1/6QfQ/MPuWMxIunQt/+7aby+O7r0B8Crx0PWiLG5o9zooAjb8scZiDVlXfxNxFG3ikoIiNlXWM6NOLX513FBdMGEJKot28fDMkHy58GJ65HJ6bBcNPhKIP4dw/u34bxvjMEofpsJKKWv5WUMTTizZQ3dDMcXnZ/PLcI/nmmH7ExVn9RZcYczac+Tt
49d/gX6/B4WfB+MsjHZWJEZY4TNg+L6nkgQ/W8tryLQB8+5iBXDN1BEcPyYhwZDFq8vegphSWPw/n3GMDGJouY4nDHNBXW3Zy55tf8daKraQnx3PN1DyuOjGXQX7Nx23Cd/Ltrr+GJQ3ThSxxmH1aX17LH97+Fy8t20haYjy3njaamVPzSEuyX5uoYknDdDG7A5i9bKuq54//t5qnP9lAIE649usj+ME3RpKZah3KjDGWOEyIytpG7n9/LX9bsI7moDJj8lB+9M1R9O9tTWqNMbtZ4jDUNDTzSME6/vrBWqobmjlv3GBuPnUUw3N6RTo0Y0wUssQRwxqagzy5cD33vltIWXUjp43tz63fGm29vI0x+2WJIwY1B1t4YelG7nl7NRsr6zhhRA4PXHk4E4ZlRTo0Y0w3YIkjhqgqry3fwl1vfsWa0hqOGZLB7AuOZuphNvCgMSZ8ljhigKry4eoyfv/GV3yxcQeH9Uvj/ssncPqRAyxhGGM6zBJHD7ekuILfvb6Kheu2MzgzhTsvOpbp4wcTsKFBjDEHyRJHD7VycxV3vfkVb6/cRp+0JH557pHMmDyUpHgbfNAYc2gscfQwRWU1/OHtfzHvs02kJ8Xz76cfzswpuaQm2j+1MaZz2N2kh9iyw/X2nrtoAwmBOH7wjZF8/+sjyUhNiHRoxpgexhJHN1dR08h976/h0QVFtKhy6XHDuOGbh9kESsYY31ji6KaqG5p5+MN1PPjhWmoam5k+fjC3nDqaodmpkQ7NGNPD+Zo4ROQM4B4gADykqrPbbM8C5gAjgXrgalVd7m27BbgGUOALYKaq1otINvAMkAsUAReraoWf1xFN6puCPP5xMX95bw3baxo5/cj+3PqtwxndPz3SoRljYkScXycWkQBwL3AmMBa4RETGttntdmCZqh4DXIlLMojIYOBGIF9Vj8IlnhneMbcB76jqKOAdb7nHaw628PQn6zn5zvf41T9XMnZgb1764RT+ekW+JQ1jTJfy84ljMlCoqmsBRORpYBqwImSfscBvAFR1lYjkikj/kNhSRKQJSAU2eeunASd57x8F3gN+6t9lRN5Ha8r5+YtfsLashmOHZnLXRcdy4mF9Ih2WMSZG+Zk4BgMbQpZLgOPa7PMZcD4wX0QmA8OBIaq6RETuBNYDdcCbqvqmd0x/Vd0MoKqbRaRfex8uItcC1wIMGzasky4pMn72wuc0tygPXDGR08b2t97expiI8q2oCmjv7qZtlmcDWSKyDPgRsBRo9uo+pgF5wCCgl4hc3pEPV9UHVDVfVfP79u3b4eCjxYbttRSV13L1lDy+ZUOEGGOigJ9PHCXA0JDlIewubgJAVauAmQDi7ojrvNfpwDpVLfW2vQCcCDwObBWRgd7TxkBgm4/XEHEL1pQBMHWUFU0ZY6KDn08ci4BRIpInIom4yu15oTuISKa3DVwLqg+8ZLIeOF5EUr2Ecgqw0ttvHnCV9/4q4GUfryHiCgrL6ZuexKh+aZEOxRhjAB+fOFS1WURuAN7AtYqao6pfish13vb7gSOAx0QkiKs0n+VtWygizwGfAs24IqwHvFPPBuaKyCxcgrnIr2uItJYWpaCwjK+NsmHPjTHRw9d+HKr6KvBqm3X3h7z/CBi1j2N/AfyinfXluCeQHu+rrTspr2lkirWgMsZEET+LqswhKih09RuWOIwx0cQSRxSbX1jGiL69GJSZEulQjDFmF0scUaqxuYWFa7cz1Z42jDFRJqzEISLPi8jZImKJposs21BJXVPQiqmMMVEn3ERwH3ApsFpEZovIGB9jMrhiqjiB40fkRDoUY4zZQ1iJQ1XfVtXLgAm4EWnfEpEFIjJTRGymIB8UFJZx9JBMMlLs6zXGRJewi55EJAf4Lq6j3lLcSLYTgLd8iSyG7axvYtmGSqYeZk8bxpjoE1Y/Dm/IjzHA34FzWgcZBJ4RkcV+BRerFq7dTrBFrX7DGBOVwu0A+GdV/b/2NqhqfifGY4CCNWUkJ8QxYVhWpEMxxpi9hFtUdYSIZLYuiEiWiFzvT0imoLCMSbnZJCcEIh2KMcbsJdzE8T1VrWxd8KZq/Z4vEcW4bVX1/GtrtRVTGWOiVriJI05CRtnzpoVN3M/+5iAVtA6jbonDGBOlwq3jeAM3Iu39uMmYrgNe9y2qGDZ/dTmZqQmMHdg70qEYY0y7wk0cPwW+D/wAN7Pfm8BDfgUVq1TdMOpTRvYhLs6GUTfGRKewEoeqtuB6j9/nbzixbW1ZDVuq6q1+wxgT1cLtxzEK+A0wFkhuXa+qI3yKKybtHkbdOv4ZY6JXuJXjj+CeNpqBk4HHcJ0BTSeav7qMIVkpDMtOjXQoxhizT+EmjhRVfQcQVS1W1TuAb/oXVuxpDrbw0dpyph5m08QaY6JbuJXj9d6Q6qu9ecQ3Av38Cyv2fLFxBzvrm61+wxgT9cJ94rgZSAVuBCYClwNX+RRTTGqt3zhxpNVvGGOi2wGfOLzOfher6r8D1cBM36OKQQWF5Ywd2JuctKRIh2KMMft1wCcOVQ0CE8UK3n1T1xhkSXGFtaYyxnQL4dZxLAVeFpFngZrWlar6gi9RxZhFRdtpDLZY/YYxplsIN3FkA+Xs2ZJKAUscnaCgsIyEgDA5LzvSoRhjzAGF23P8oOo1ROQM3EyBAeAhVZ3dZnsWMAcYCdQDV6vqchE5HHgmZNcRwH+q6t0icgduZN5Sb9vtqvrqwcQXLeYXljFhWBapieHmcWOMiZxwe44/gnvC2IOqXr2fYwLAvcBpQAmwSETmqeqKkN1uB5ap6nQRGePtf4qqfgWMCznPRuDFkOP+oKp3hhN7tNte08iKzVX8+NTRkQ7FGGPCEu6fuK+EvE8GpgObDnDMZKBQVdcCiMjTwDQgNHGMxQ1lgqquEpFcEemvqltD9jkFWKOqxWHG2q18tKYcVZgyyuo3jDHdQ7hFVc+HLovIU8DbBzhsMLAhZLkEOK7NPp8B5wPzRWQyMBwYAoQmjhnAU22Ou0FErgQWA7d6E0t1S/MLy0hPiueYwRmRDsUYY8ISbgfAtkYBww6wT3vNd9sWd80GskRkGfAjXOut5l0nEEkEzgWeDTnmPlydyDhgM3BXux8ucq2ILBaRxaWlpe3tEhUKCss4bkQO8YGD/acwxpiuFW4dx072vOlvwc3RsT8lwNCQ5SG0Kd5S1Sq8DoVeP5F13qvVmcCnoUVXoe9F5EH2LEYLPfcDwAMA+fn5e9XPRIP15bWs317L1VNyIx2KMcaELdyiqvSDOPciYJSI5OEqt2cAl4buICKZQK2qNgLXAB94yaTVJbQpphKRgaq62VucDiw/iNiiwq5pYq1+wxjTjYRVPiIi00UkI2Q5U0TO298xqtoM3ICbdnYlMFdVvxSR60TkOm+3I4AvRWQV7unippDPSMW1yGrbV+R3IvKFiHyOG+L9lnCuIRoVFJbRv3cSI/umRToUY4wJW7itqn6hqruaw6pqpYj8Anhpfwd5/StebbPu/pD3H+HqS9o7thbYawwOVb0izJijWkuLsmBNOScd3teGUTfGdCvh1si2t5/1Vj
sEK7dUsb2mkSkjrZjKGNO9hJs4FovI/4rISBEZISJ/AJb4GVhPt3uaWEscxpjuJdzE8SOgETcMyFygDvihX0HFgvmF5RzWL40BGckH3tkYY6JIuK2qaoDbfI4lZjQ0B1m0bjvfmTT0wDsbY0yUCbdV1Vte09nW5SwRecO3qHq4pesrqWsKWjGVMaZbCreoqo+qVrYueEN82JzjB6mgsIw4geNG2DDqxpjuJ9zE0SIiu4YYEZFc2hkt14RnfmEZxw7NpHdyQqRDMcaYDgu3Se3PcQMRvu8tfx241p+Qeraq+iY+21DJD08+LNKhGGPMQQm3cvx1EcnHJYtlwMu4llWmgz5eU06LWjNcY0z3Fe4gh9fghgMZgkscxwMfsedUsiYMC9aUk5IQYPywzEiHYowxByXcOo6bgElAsaqeDIxn99StpgPmF5YxOS+bpPhApEMxxpiDEm7iqFfVegARSVLVVcDh/oXVM23ZUU/htmqmHLbXEFzGGNNthFs5XuL143gJeEtEKjjw1LGmDRtmxBjTE4RbOT7de3uHiLwLZACv+xZVD1VQWEZ2r0SOGNA70qEYY8xB6/AIt6r6/oH3Mm2pKgVryjhxZA5xcTaMujGm+7KJrrvImtJqtlY1MNWKqYwx3Zwlji4yf7XVbxhjegZLHF1kfmE5w7JTGZqdGulQjDHmkFji6ALNwRY+XltuTxvGmB7BEkcX+KxkB9UNzVa/YYzpESxxdIEFhWWIwAkjreOfMab7s8TRBeYXlnHkoN5k90qMdCjGGHPILHH4rLaxmU/XV1j9hjGmx7DE4bNP1m2nKahMGWmJwxjTM1ji8FlBYRmJgTgm5do0scaYnsHXxCEiZ4jIVyJSKCK3tbM9S0ReFJHPReQTETnKW3+4iCwLeVWJyM3etmwReUtEVns/s/y8hkM1v7CcicOzSEm0YdSNMT2Db4lDRALAvcCZwFjgEhEZ22a324FlqnoMcCVwD4CqfqWq41R1HDARqAVe9I65DXhHVUcB73jLUam8uoGVm6uYOsqKqYwxPYefTxyTgUJVXauqjcDTwLQ2+4zF3fzx5vjIFZH+bfY5BVijqsXe8jTgUe/9o8B5PsTeKRasKQdsmBFjTM/iZ+IYDGwIWS7x1oX6DDgfQEQmA8Nx09OGmgE8FbLcX1U3A3g/+7X34SJyrYgsFpHFpaWRmaywoLCM9OR4jh6cEZHPN8YYP/iZONobO1zbLM8GskRkGfAjYCnQvOsEIonAucCzHf1wVX1AVfNVNb9v374dPfyQqSofri7jhBE5BGwYdWNMD9Lh+Tg6oAQYGrI8hDazBqpqFTATQEQEWOe9Wp0JfKqqW0PWbRWRgaq6WUQGAtv8CP5Qrd9ey8bKOr7/jRGRDsUYYzqVn08ci4BRIpLnPTnMAOaF7iAimd42gGuAD7xk0uoS9iymwjvHVd77q4CXOz3yTlBQaPUbxpieybcnDlVtFpEbgDeAADBHVb8Ukeu87fcDRwCPiUgQWAHMaj1eRFKB04Dvtzn1bGCuiMwC1gMX+XUNh6KgsIyBGcmM6NMr0qEYY0yn8rOoClV9FXi1zbr7Q95/BIzax7G1wF6jAqpqOa6lVdRqaXHTxJ56RH9cCZwxxvQc1nPcBys2V1FZ28SUw2w0XGNMz2OJwwfzC71pYm18KmNMD2SJwwcFhWWM7p9Gv97JkQ7FGGM6nSWOTlbfFGRR0XZrTWWM6bEscXSyT9dXUN/UYtPEGmN6LEscnaygsIxAnHDcCKsYN8b0TJY4Otn8wnLGDc0kLcnXls7GGBMxljg60Y66Jr4oqbT6DWNMj2aJoxN9vLacFsXqN4wxPZoljk5UUFhGamKAcUMzIx2KMcb4xhJHJ5pfWMZxedkkxtvXaozpuewO10k2VdaxtrTG6jeMMT2eJY5OUtA6zIglDmNMD2eJo5MUFJbRJy2Rw/unRzoUY4zxlSWOTqCqFKwp58SRfYizaWKNMT2cJY5OsHpbNaU7G6wZrjEmJlji6ATzV3v1G6MscRhjej5LHJ2goLCM3JxUBmemRDoUY4zxnSWOQ9QUbOHjteXWmsoYEzMscRyiz0sqqWkMWv2GMSZmWOI4RPNXlyMCJ4y0YdSNMbHBEschKigs4+jBGWSmJkY6FGOM6RKWOA5BTUMzn66vsPoNY0xMscRxCD5Zt53mFmXKSEscxpjY4WviEJEzROQrESkUkdva2Z4lIi+KyOci8omIHBWyLVNEnhORVSKyUkRO8NbfISIbRWSZ9zrLz2vYn/mFZSTGx5GfmxWpEIwxpsv5Nr+piASAe4HTgBJgkYjMU9UVIbvdDixT1ekiMsbb/xRv2z3A66p6oYgkAqkhx/1BVe/0K/ZwFRSWMSk3i+SEQKRDMcaYLuPnE8dkoFBV16pqI/A0MK3NPmOBdwBUdRWQKyL9RaQ38HXgYW9bo6pW+hhrh5XubGDVlp1Wv2GMiTl+Jo7BwIaQ5RJvXajPgPMBRGQyMBwYAowASoFHRGSpiDwkIr1CjrvBK96aIyLtlhOJyLUislhEFpeWlnbSJe22YI0bZsT6bxhjYo2fiaO9YWK1zfJsIEtElgE/ApYCzbgitAnAfao6HqgBWutI7gNGAuOAzcBd7X24qj6gqvmqmt+3b99Du5J2FBSWkZGSwJGDMjr93MYYE818q+PAPWEMDVkeAmwK3UFVq4CZACIiwDrvlQqUqOpCb9fn8BKHqm5tPV5EHgRe8Sn+fVJV5q8u44QROQRsGHVjTIzx84ljETBKRPK8yu0ZwLzQHbyWU609564BPlDVKlXdAmwQkcO9bacAK7xjBoacYjqw3MdraFdReS2bdtTbaLjGmJjk2xOHqjaLyA3AG0AAmKOqX4rIdd72+4EjgMdEJIhLDLNCTvEj4AkvsazFezIBfici43DFXkXA9/26hn1pnSbW6jeMMbHIz6IqVPVV4NU26+4Pef8RMGofxy4D8ttZf0XnRtlxBYVlDM5MITcn9cA7G2NMD2M9xzso2KIsWFPOlMNycNUyxhgTWyxxdNCXm3awo67J+m8YY2KWJY4Omu/Vb5xo41MZY2KUJY4OWlBYzpgB6fRNT4p0KMYYExGWODqgvinIJ0XbrZjKGBPTLHF0wJLiChqbW6wZrjEmplni6ID5hWXExwmT87IjHYoxxkSMJY4OKCgsY/ywTHol+dr9xRhjopoljjBV1jbyxcYdVr9hjIl5ljjC9PHaclRtmBFjjLHEEab5hWX0Sgxw7NDMSIdijDERZYkjTAWF5Rw/IoeEgH1lxpjYZnfBMJRU1LKurMbqN4wxBkscYVlQWA5gicMYY7DEEZaCNWX0SUtidP+0SIdijDERZ4njAFSVgsIyptow6sYYA1jiOKCvtu6krLrRiqmMMcZjieMA5q92w6hb4jDGGMcSxwEUFJYxom8vBmWmRDoUY4yJCpY49qOxuYWF67YzxSZtMsaYXSxx7MdnJZXUNgatmMoYY0JY4tiP+avLiBM4YUROpEMxxpioYYljPwZlJnPRxKFkpCZEOhRjjIkaNrHEfnxn0jC+M2lYpMMwxpio4
usTh4icISJfiUihiNzWzvYsEXlRRD4XkU9E5KiQbZki8pyIrBKRlSJygrc+W0TeEpHV3s8sP6/BGGPMnnxLHCISAO4FzgTGApeIyNg2u90OLFPVY4ArgXtCtt0DvK6qY4BjgZXe+tuAd1R1FPCOt2yMMaaL+PnEMRkoVNW1qtoIPA1Ma7PPWNzNH1VdBeSKSH8R6Q18HXjY29aoqpXeMdOAR733jwLn+XgNxhhj2vAzcQwGNoQsl3jrQn0GnA8gIpOB4cAQYARQCjwiIktF5CER6eUd019VNwN4P/u19+Eicq2ILBaRxaWlpZ11TcYYE/P8TBztjQiobZZnA1kisgz4EbAUaMZV2k8A7lPV8UANHSySUtUHVDVfVfP79u3b0diNMcbsg5+tqkqAoSHLQ4BNoTuoahUwE0Dc0LPrvFcqUKKqC71dn2N34tgqIgNVdbOIDAS2+XcJxhhj2vLziWMRMEpE8kQkEZgBzAvdwWs5legtXgN8oKpVqroF2CAih3vbTgFWeO/nAVd5768CXvbxGowxxrTh2xOHqjaLyA3AG0AAmKOqX4rIdd72+4EjgMdEJIhLDLNCTvEj4AkvsazFezLBFW/NFZFZwHrgIr+uwRhjzN5EtW21Q88jIqVA8UEe3gco68Rwujv7Pnaz72JP9n3sqSd8H8NVda9K4phIHIdCRBaran6k44gW9n3sZt/Fnuz72FNP/j5srCpjjDEdYonDGGNMh1jiOLAHIh1AlLHvYzf7LvZk38eeeuz3YXUcxhhjOsSeOIwxxnSIJQ5jjDEdYoljPw40n0isEJGhIvKuNy/KlyJyU6RjigYiEvAG4Xwl0rFE2r7mz4lFInKL9/9kuYg8JSLJkY6ps1ni2Icw5xOJFc3Arap6BHA88MMY/i5C3cTueWJi3b7mz4kpIjIYuBHIV9WjcKNmzIhsVJ3PEse+hTOfSExQ1c2q+qn3fifuptB2iPyYIiJDgLOBhyIdS6QdYP6cWBQPpIhIPG7A1k0H2L/bscSxb+HMJxJzRCQXGA8sPMCuPd3dwE+AlgjHEQ32N39OTFHVjcCduHH0NgM7VPXNyEbV+Sxx7Fs484nEFBFJA54HbvaGxI9JIvJtYJuqLol0LFHikOfP6SlEJAtXMpEHDAJ6icjlkY2q81ni2LcDzicSS0QkAZc0nlDVFyIdT4RNAc4VkSJcEeY3ReTxyIYUUSXsPX/OhAjGE0mnAutUtVRVm4AXgBMjHFOns8SxbwecTyRWeJNsPQysVNX/jXQ8kaaqP1PVIaqai/u9+D9V7XF/VYbrAPPnxJr1wPEikur9vzmFHthQwM8ZALu1fc0nEuGwImUKcAXwhTfNL8Dtqvpq5EIyUWZf8+fEFFVdKCLPAZ/iWiMupQcOPWJDjhhjjOkQK6oyxhjTIZY4jDHGdIglDmOMMR1iicMYY0yHWOIwxhjTIZY4jIlyInKSjcBrooklDmOMMR1iicOYTiIil4vIJyKyTET+6s3XUS0id4nIpyLyjoj09fYdJyIfi8jnIvKiN8YRInKYiLwtIp95x4z0Tp8WMt/FE16vZGMiwhKHMZ1ARI4AvgNMUdVxQBC4DOgFfKqqE4D3gV94hzwG/FRVjwG+CFn/BHCvqh6LG+Nos7d+PHAzbm6YEbje/MZEhA05YkznOAWYCCzyHgZSgG24Ydef8fZ5HHhBRDKATFV931v/KPCsiKQDg1X1RQBVrQfwzveJqpZ4y8uAXGC+71dlTDsscRjTOQR4VFV/tsdKkf/XZr/9jfGzv+KnhpD3Qez/rokgK6oypnO8A1woIv0ARCRbRIbj/o9d6O1zKTBfVXcAFSLyNW/9FcD73hwnJSJynneOJBFJ7cqLMCYc9leLMZ1AVVeIyH8Ab4pIHNAE/BA3qdGRIrIE2IGrBwG4CrjfSwyho8leAfxVRP7LO8dFXXgZxoTFRsc1xkciUq2qaZGOw5jOZEVVxhhjOsSeOIwxxnSIPXEYY4zpEEscxhhjOsQShzHGmA6xxGGMMaZDLHEYY4zpkP8Pa8BLdF03n7YAAAAASUVORK5CYII=",
"text/plain": [
"<
keras.callbacks.History at 0x16bb95e80
>"
"<
Figure size 432x288 with 1 Axes
>"
]
},
"execution_count": 8,
"metadata": {},
"output_type": "execute_result"
"metadata": {
"needs_background": "light"
},
"output_type": "display_data"
}
],
"source": [
"hist"
"from matplotlib import pyplot as plt\n",
"\n",
"\n",
"plt.plot(hist.history['accuracy'])\n",
"plt.plot(hist.history['val_accuracy'])\n",
"plt.title('model accuracy')\n",
"plt.ylabel('accuracy')\n",
"plt.xlabel('epoch')\n",
"plt.legend(['train', 'val'], loc='upper left')\n",
"plt.show()"
]
},
{
...
...
%% Cell type:code id: tags:
``` python
import sys
import tensorflow as tf
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers.convolutional import Conv2D, MaxPooling2D
from keras.utils import np_utils
import numpy as np
```
%% Cell type:markdown id: tags:
%% Cell type:code id: tags:
``` python
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
```
%% Cell type:code id: tags:
``` python
img_rows = 28
img_cols = 28
x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)
x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)
x_train = x_train.astype('float32') / 255.
x_test = x_test.astype('float32') / 255.
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
```
%% Output
x_train shape: (60000, 28, 28, 1)
60000 train samples
10000 test samples
%% Cell type:code id: tags:
``` python
def ccee(y_true, y_pred):
    delta = 1e-7
    log_pred = tf.math.log(y_pred + delta)

    return -(tf.reduce_sum(tf.reduce_sum(y_true * log_pred, axis = 1)))/y_true.shape[0]
```
%% Cell type:code id: tags:
``` python
num_classes = 10
y_train = keras.utils.np_utils.to_categorical(y_train, num_classes)
y_test = keras.utils.np_utils.to_categorical(y_test, num_classes)
```
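`np_utils.to_categorical` turns the integer labels 0–9 into one-hot rows of length `num_classes`, which is the representation the custom `ccee` loss expects when it multiplies labels against log-probabilities. A minimal sketch of what the conversion produces, not part of the commit:

``` python
# Minimal sketch (not part of this commit): to_categorical maps integer labels to one-hot rows.
import numpy as np
from keras.utils import np_utils

labels = np.array([5, 0, 3])
print(np_utils.to_categorical(labels, 10))
# [[0. 0. 0. 0. 0. 1. 0. 0. 0. 0.]
#  [1. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
#  [0. 0. 0. 1. 0. 0. 0. 0. 0. 0.]]
```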
%% Cell type:code id: tags:
``` python
def create_model_( img_rows,img_cols, num_classes):
    inputs = tf.keras.Input(shape=(28, 28, 1))
    x = tf.keras.layers.Conv2D(32, kernel_size = (5,5), padding='same',
                               activation='relu')(inputs)
    x = tf.keras.layers.MaxPool2D(pool_size = (2,2),strides = (2,2))(x)

    x = tf.keras.layers.Conv2D(64, kernel_size = (2,2),padding='same',
                               activation='relu')(x)
    x = tf.keras.layers.MaxPool2D(pool_size = (2,2))(x)

    x = tf.keras.layers.Dropout(0.25)(x)
    x = tf.keras.layers.Flatten()(x)
    x = tf.keras.layers.Dense(1000, activation='leaky_relu')(x)
    x = tf.keras.layers.Dropout(0.5)(x)
    outputs = tf.keras.layers.Dense(num_classes, activation='softmax')(x)

    model = keras.Model(inputs, outputs, name='image_class')
    return model

print(img_cols, img_rows)
model = create_model_(img_rows,img_cols, num_classes)
model.summary()
```
%% Output
28 28
Model: "image_class"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_1 (InputLayer) [(None, 28, 28, 1)] 0
conv2d (Conv2D) (None, 28, 28, 32) 832
max_pooling2d (MaxPooling2D (None, 14, 14, 32) 0
)
conv2d_1 (Conv2D) (None, 14, 14, 64) 8256
max_pooling2d_1 (MaxPooling (None, 7, 7, 64) 0
2D)
dropout (Dropout) (None, 7, 7, 64) 0
flatten (Flatten) (None, 3136) 0
dense (Dense) (None, 1000) 3137000
dropout_1 (Dropout) (None, 1000) 0
dense_1 (Dense) (None, 10) 10010
=================================================================
Total params: 3,156,098
Trainable params: 3,156,098
Non-trainable params: 0
_________________________________________________________________
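The parameter counts in this summary follow directly from the layer shapes; the short check below is a sketch added for illustration, not part of the commit.

``` python
# Sketch (not part of this commit): how the summary's parameter counts arise.
conv2d   = 5 * 5 * 1 * 32 + 32     # 832: 5x5 kernels over 1 input channel, 32 filters + biases
conv2d_1 = 2 * 2 * 32 * 64 + 64    # 8256
dense    = 3136 * 1000 + 1000      # 3137000: flattened 7*7*64 = 3136 inputs
dense_1  = 1000 * 10 + 10          # 10010
print(conv2d + conv2d_1 + dense + dense_1)  # 3156098 trainable params in total
```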
%% Cell type:code id: tags:
``` python
```
%% Cell type:code id: tags:
``` python
model.compile( loss = ccee,optimizer='adam', metrics= ['accuracy'] )
hist = model.fit(x_train, y_train, batch_size=10, epochs= 10, verbose=1, validation_data=(x_test, y_test))
```
%% Output
Epoch 1/10
1/6000 [..............................] - ETA: 17:42 - loss: 2.2814 - accuracy: 0.2000
2022-06-26 11:32:02.208949: W tensorflow/core/platform/profile_utils/cpu_utils.cc:128] Failed to get CPU frequency: 0 Hz
2022-06-26 12:24:22.787706: W tensorflow/core/platform/profile_utils/cpu_utils.cc:128] Failed to get CPU frequency: 0 Hz
6000/6000 [==============================] - 96s 16ms/step - loss: 0.1236 - accuracy: 0.9616 - val_loss: 0.0493 - val_accuracy: 0.9853
6000/6000 [==============================] - 68s 11ms/step - loss: 0.1259 - accuracy: 0.9608 - val_loss: 0.0342 - val_accuracy: 0.9888
Epoch 2/10
6000/6000 [==============================] - 150s 25ms/step - loss: 0.0680 - accuracy: 0.9796 - val_loss: 0.0501 - val_accuracy: 0.9845
6000/6000 [==============================] - 77s 13ms/step - loss: 0.0657 - accuracy: 0.9800 - val_loss: 0.0442 - val_accuracy: 0.9865
Epoch 3/10
6000/6000 [==============================] - 186s 31ms/step - loss: 0.0557 - accuracy: 0.9835 - val_loss: 0.0439 - val_accuracy: 0.9856
6000/6000 [==============================] - 81s 13ms/step - loss: 0.0554 - accuracy: 0.9827 - val_loss: 0.0312 - val_accuracy: 0.9902
Epoch 4/10
6000/6000 [==============================] - 206s 34ms/step - loss: 0.0517 - accuracy: 0.9848 - val_loss: 0.0337 - val_accuracy: 0.9890
6000/6000 [==============================] - 91s 15ms/step - loss: 0.0503 - accuracy: 0.9850 - val_loss: 0.0385 - val_accuracy: 0.9885
Epoch 5/10
6000/6000 [==============================] - 180s 30ms/step - loss: 0.0480 - accuracy: 0.9861 - val_loss: 0.0295 - val_accuracy: 0.9912
6000/6000 [==============================] - 110s 18ms/step - loss: 0.0481 - accuracy: 0.9862 - val_loss: 0.0359 - val_accuracy: 0.9887
Epoch 6/10
6000/6000 [==============================] - 159s 26ms/step - loss: 0.0436 - accuracy: 0.9869 - val_loss: 0.0415 - val_accuracy: 0.9880
6000/6000 [==============================] - 110s 18ms/step - loss: 0.0481 - accuracy: 0.9860 - val_loss: 0.0637 - val_accuracy: 0.9829
Epoch 7/10
6000/6000 [==============================] - 162s 27ms/step - loss: 0.0492 - accuracy: 0.9863 - val_loss: 0.0433 - val_accuracy: 0.9876
6000/6000 [==============================] - 108s 18ms/step - loss: 0.0444 - accuracy: 0.9873 - val_loss: 0.0417 - val_accuracy: 0.9903
Epoch 8/10
6000/6000 [==============================] - 139s 23ms/step - loss: 0.0454 - accuracy: 0.9875 - val_loss: 0.0372 - val_accuracy: 0.9901
6000/6000 [==============================] - 116s 19ms/step - loss: 0.0432 - accuracy: 0.9875 - val_loss: 0.0325 - val_accuracy: 0.9906
Epoch 9/10
6000/6000 [==============================] - 138s 23ms/step - loss: 0.0424 - accuracy: 0.9883 - val_loss: 0.0314 - val_accuracy: 0.9905
6000/6000 [==============================] - 103s 17ms/step - loss: 0.0430 - accuracy: 0.9886 - val_loss: 0.0329 - val_accuracy: 0.9898
Epoch 10/10
6000/6000 [==============================] - 133s 22ms/step - loss: 0.0421 - accuracy: 0.9885 - val_loss: 0.0401 - val_accuracy: 0.9893
6000/6000 [==============================] - 97s 16ms/step - loss: 0.0448 - accuracy: 0.9884 - val_loss: 0.0318 - val_accuracy: 0.9902
%% Cell type:code id: tags:
``` python
hist
from matplotlib import pyplot as plt
plt.plot(hist.history['accuracy'])
plt.plot(hist.history['val_accuracy'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'val'], loc='upper left')
plt.show()
```
%% Output
<keras.callbacks.History at 0x16bb95e80>
%% Cell type:code id: tags:
``` python
```
%% Cell type:code id: tags:
``` python
```
%% Cell type:code id: tags:
``` python
```
...
...