From ab10e648ba76fc81906ccee55d48cd957adc73e1 Mon Sep 17 00:00:00 2001 From: mikerabat Date: Mon, 1 Aug 2022 13:12:11 +0200 Subject: [PATCH 01/13] #88: Delphi AVX enhancements (simple... no AVX512) #85: Fixed opencl problem + Better cloning (way faster) + Fixed many Delphi warnings + New convolutional classes with swift and gelu + Automatic check if AVX is available + Fixed issues with output procedures: sometimes error proc pointed to the console output (althouth I wanted that in a file...) --- neural/CPUFeatures.pas | 354 ++++++++++++++++++ neural/Neural.AVX.pas | 112 ++++++ neural/Neural.AVXx64.pas | 120 +++++++ neural/neuralabfun.pas | 7 +- neural/neuralbit.pas | 2 +- neural/neuralbyteprediction.pas | 62 ++-- neural/neuraldatasets.pas | 13 +- neural/neuralevolutionary.pas | 1 - neural/neuralfit.pas | 17 +- neural/neuralnetwork.pas | 610 ++++++++++++++++++-------------- neural/neuralopencl.pas | 137 ++++--- neural/neuralthread.pas | 4 +- neural/neuralvolume.pas | 264 +++++++++++--- neural/neuralvolumev.pas | 4 +- 14 files changed, 1287 insertions(+), 420 deletions(-) create mode 100644 neural/CPUFeatures.pas create mode 100644 neural/Neural.AVX.pas create mode 100644 neural/Neural.AVXx64.pas diff --git a/neural/CPUFeatures.pas b/neural/CPUFeatures.pas new file mode 100644 index 00000000..666d3902 --- /dev/null +++ b/neural/CPUFeatures.pas @@ -0,0 +1,354 @@ +// ################################################################### +// #### This file is part of the mathematics library project, and is +// #### offered under the licence agreement described on +// #### http://www.mrsoft.org/ +// #### +// #### Copyright:(c) 2011, Michael R. . All rights reserved. +// #### +// #### Unless required by applicable law or agreed to in writing, software +// #### distributed under the License is distributed on an "AS IS" BASIS, +// #### WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// #### See the License for the specific language governing permissions and +// #### limitations under the License. +// ################################################################### + + +unit CPUFeatures; + +// unit to determine some cpu features + +interface + +function IsSSE3Present : boolean; +function IsAVXPresent : boolean; +function IsAVX512Present : boolean; +function IsFMAPresent : boolean; +function IsHardwareRNDSupport : boolean; +function IsHardwareRDSeed : boolean; + +function GetCurrentProcessorNumber : LongWord; register; + +implementation + +// ########################################### +// #### Global constants for features: + + +// base idea from https://stackoverflow.com/questions/6121792/how-to-check-if-a-cpu-supports-the-sse3-instruction-set +// misc +var HW_MMX: boolean = False; + HW_x64: boolean = False; + HW_ABM: boolean = False; // Advanced Bit Manipulation + HW_RDRAND: boolean = False; + HW_RDSEED: boolean = False; + HW_BMI1: boolean = False; + HW_BMI2: boolean = False; + HW_ADX: boolean = False; + HW_PREFETCHWT1: boolean = False; + + // SIMD: 128-bit + HW_SSE: boolean = False; + HW_SSE2: boolean = False; + HW_SSE3: boolean = False; + HW_SSSE3: boolean = False; + HW_SSE41: boolean = False; + HW_SSE42: boolean = False; + HW_SSE4a: boolean = False; + HW_AES: boolean = False; + HW_SHA: boolean = False; + + // SIMD: 256-bit + HW_AVX: boolean = False; + HW_XOP: boolean = False; + HW_FMA3: boolean = False; + HW_FMA4: boolean = False; + HW_AVX2: boolean = False; + + // SIMD: 512-bit + HW_AVX512F: boolean = False; // AVX512 Foundation + HW_AVX512CD: boolean = False; // AVX512 Conflict Detection + HW_AVX512PF: boolean = False; // AVX512 Prefetch + HW_AVX512ER: boolean = False; // AVX512 Exponential + Reciprocal + HW_AVX512VL: boolean = False; // AVX512 Vector Length Extensions + HW_AVX512BW: boolean = False; // AVX512 Byte + Word + HW_AVX512DQ: boolean = False; // AVX512 Doubleword + Quadword + HW_AVX512IFMA: boolean = False; // AVX512 
Integer 52-bit Fused Multiply-Add + HW_AVX512VBMI: boolean = False; // AVX512 Vector Byte Manipulation Instructions + + AVX_OS_SUPPORT : boolean = False; // 256bit AVX supported in context switch + AVX512_OS_SUPPORT : boolean = False; // 512bit AVX supported in context switch + +// ############################################################## +// #### feature detection code +// ############################################################## + +type + TRegisters = record + EAX, + EBX, + ECX, + EDX: Cardinal; + end; + +{$IFDEF FPC} {$ASMMODE intel} {$S-} {$ENDIF} + +{$IFDEF CPUX64} +{$DEFINE x64} +{$ENDIF} +{$IFDEF cpux86_64} +{$DEFINE x64} +{$ENDIF} +{$IFDEF x64} + +function IsCPUID_Available : boolean; +begin + Result := true; +end; + +procedure GetCPUID(Param: Cardinal; out Registers: TRegisters); +var iRBX, iRDI : int64; +{$IFDEF FPC} +begin +{$ENDIF} +asm + mov iRBX, rbx; + mov iRDI, rdi; + +// .pushnv rbx; {save affected registers} +// .pushnv rdi; + + MOV RDI, Registers + MOV EAX, Param; + XOR RBX, RBX {clear EBX register} + XOR RCX, RCX {clear ECX register} + XOR RDX, RDX {clear EDX register} + DB $0F, $A2 {CPUID opcode} + MOV TRegisters(RDI).&EAX, EAX {save EAX register} + MOV TRegisters(RDI).&EBX, EBX {save EBX register} + MOV TRegisters(RDI).&ECX, ECX {save ECX register} + MOV TRegisters(RDI).&EDX, EDX {save EDX register} + + // epilog + mov rbx, iRBX; + mov rdi, IRDI; +{$IFDEF FPC} +end; +{$ENDIF} +end; + +{$ELSE} + +function IsCPUID_Available: Boolean; register; +{$IFDEF FPC} begin {$ENDIF} +asm + PUSHFD {save EFLAGS to stack} + POP EAX {store EFLAGS in EAX} + MOV EDX, EAX {save in EDX for later testing} + XOR EAX, $200000; {flip ID bit in EFLAGS} + PUSH EAX {save new EFLAGS value on stack} + POPFD {replace current EFLAGS value} + PUSHFD {get new EFLAGS} + POP EAX {store new EFLAGS in EAX} + XOR EAX, EDX {check if ID bit changed} + JZ @exit {no, CPUID not available} + MOV EAX, True {yes, CPUID is available} +@exit: +end; +{$IFDEF FPC} end; {$ENDIF} + 
+procedure GetCPUID(Param: Cardinal; var Registers: TRegisters); +{$IFDEF FPC} begin {$ENDIF} +asm + PUSH EBX {save affected registers} + PUSH EDI + MOV EDI, Registers + XOR EBX, EBX {clear EBX register} + XOR ECX, ECX {clear ECX register} + XOR EDX, EDX {clear EDX register} + DB $0F, $A2 {CPUID opcode} + MOV TRegisters(EDI).&EAX, EAX {save EAX register} + MOV TRegisters(EDI).&EBX, EBX {save EBX register} + MOV TRegisters(EDI).&ECX, ECX {save ECX register} + MOV TRegisters(EDI).&EDX, EDX {save EDX register} + POP EDI {restore registers} + POP EBX +end; +{$IFDEF FPC} end; {$ENDIF} + +{$ENDIF} + + +// ########################################### +// #### Local check for AVX support according to +// from https://software.intel.com/en-us/blogs/2011/04/14/is-avx-enabled +// and // from https://software.intel.com/content/www/us/en/develop/articles/how-to-detect-knl-instruction-support.html +procedure InitAVXOSSupportFlags; {$IFDEF FPC}assembler;{$ENDIF} +asm + {$IFDEF x64} + push rbx; + {$ELSE} + push ebx; + {$ENDIF} + + xor eax, eax; + cpuid; + cmp eax, 1; + jb @@endProc; + + mov eax, 1; + cpuid; + + and ecx, $018000000; // check 27 bit (OS uses XSAVE/XRSTOR) + cmp ecx, $018000000; // and 28 (AVX supported by CPU) + jne @@endProc; + + xor ecx, ecx ; // XFEATURE_ENABLED_MASK/XCR0 register number = 0 + db $0F, $01, $D0; //xgetbv ; // XFEATURE_ENABLED_MASK register is in edx:eax + and eax, $E6; //110b + cmp eax, $E6; //1110 0011 = zmm_ymm_xmm = (7 << 5) | (1 << 2) | (1 << 1); + jne @@not_supported; + {$IFDEF x64} + mov [rip + AVX512_OS_SUPPORT], 1; + {$ELSE} + mov AVX512_OS_SUPPORT, 1; + {$ENDIF} + @@not_supported: + + and eax, $6; //110b + cmp eax, $6; //1110 0011 = check for AVX os support (256bit) in a context switch + jne @@endProc; + {$IFDEF x64} + mov [rip + AVX_OS_SUPPORT], 1; + {$ELSE} + mov AVX_OS_SUPPORT, 1; + {$ENDIF} + + @@endProc: + + {$IFDEF x64} + pop rbx; + {$ELSE} + pop ebx; + {$ENDIF} +end; + +function GetCurrentProcessorNumber : LongWord; register; // 
stdcall; external 'Kernel32.dll'; +{$IFDEF FPC} +begin +{$ENDIF} +asm + mov eax, 1; + DB $0F, $A2; //cpuid; + shr ebx, 24; + mov eax, ebx; +{$IFDEF FPC} +end; +{$ENDIF} +end; + +procedure InitFlags; +var nIds : LongWord; + reg : TRegisters; +begin + if IsCPUID_Available then + begin + GetCPUID(0, reg); + nids := reg.EAX; + + + if nids >= 1 then + begin + GetCPUID(1, reg); + + HW_MMX := (reg.EDX and (1 shl 23)) <> 0; + HW_SSE := (reg.EDX and (1 shl 25)) <> 0; + HW_SSE2 := (reg.EDX and (1 shl 26)) <> 0; + HW_SSE3 := (reg.EDX and (1 shl 0)) <> 0; + + HW_SSSE3 := (reg.ECX and (1 shl 9)) <> 0; + HW_SSE41 := (reg.ECX and (1 shl 19)) <> 0; + HW_SSE42 := (reg.ECX and (1 shl 20)) <> 0; + HW_AES := (reg.ECX and (1 shl 25)) <> 0; + + HW_AVX := (reg.ECX and (1 shl 28)) <> 0; + HW_FMA3 := (reg.ECX and (1 shl 12)) <> 0; + + HW_RDRAND := (reg.ECX and (1 shl 30)) <> 0; + end; + + if nids >= 7 then + begin + GetCPUID($7, reg); + HW_AVX2 := (reg.EBX and (1 shl 5)) <> 0; + + HW_BMI1 := (reg.EBX and (1 shl 3)) <> 0; + HW_BMI2 := (reg.EBX and (1 shl 8)) <> 0; + HW_ADX := (reg.EBX and (1 shl 19)) <> 0; + HW_SHA := (reg.EBX and (1 shl 29)) <> 0; + HW_PREFETCHWT1 := (reg.EBX and (1 shl 0)) <> 0; + HW_RDSEED := (reg.EBX and (1 shl 18)) <> 0; + + HW_AVX512F := (reg.EBX and (1 shl 16)) <> 0; + HW_AVX512CD := (reg.EBX and (1 shl 28)) <> 0; + HW_AVX512PF := (reg.EBX and (1 shl 26)) <> 0; + HW_AVX512ER := (reg.EBX and (1 shl 27)) <> 0; + HW_AVX512VL := (reg.EBX and (1 shl 31)) <> 0; + HW_AVX512BW := (reg.EBX and (1 shl 30)) <> 0; + HW_AVX512DQ := (reg.EBX and (1 shl 17)) <> 0; + HW_AVX512IFMA := (reg.EBX and (1 shl 21)) <> 0; + HW_AVX512VBMI := (reg.ECX and (1 shl 1)) <> 0; + end; + + GetCPUID($80000000, reg); + + if reg.EAX >= $80000001 then + begin + GetCPUID($80000001, reg); + + HW_x64 := (reg.EDX and (1 shl 29)) <> 0; + HW_ABM := (reg.ECX and (1 shl 5)) <> 0; + HW_SSE4a := (reg.ECX and (1 shl 6)) <> 0; + HW_FMA4 := (reg.ECX and (1 shl 16)) <> 0; + HW_XOP := (reg.ECX and (1 shl 11)) <> 0; + 
end; + + // now check the os support + if (HW_AVX) or (HW_AVX2) then + InitAVXOSSupportFlags; + end; +end; + +function IsSSE3Present : boolean; +begin + Result := HW_SSE3; +end; + +function IsAVXPresent : boolean; +begin + Result := HW_AVX2 and AVX_OS_SUPPORT; +end; + +function IsAVX512Present : boolean; +begin + Result := HW_AVX512F and AVX512_OS_SUPPORT; +end; + +function IsFMAPresent : boolean; +begin + Result := AVX_OS_SUPPORT and HW_FMA3; +end; + +function IsHardwareRNDSupport : boolean; +begin + Result := HW_RDRAND; +end; + +function IsHardwareRDSeed : boolean; +begin + Result := HW_RDSEED; +end; + +initialization + InitFlags; + +end. diff --git a/neural/Neural.AVX.pas b/neural/Neural.AVX.pas new file mode 100644 index 00000000..c121e30f --- /dev/null +++ b/neural/Neural.AVX.pas @@ -0,0 +1,112 @@ +unit Neural.AVX; + +// ########################################### +// #### 32 bit intel avx functions +// ########################################### + +interface + +{$IFDEF CPUX64} +{$DEFINE x64} +{$ENDIF} +{$IFDEF cpux86_64} +{$DEFINE x64} +{$ENDIF} +{$IFNDEF x64} + +function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} + +{$ENDIF} + +implementation + +{$IFNDEF x64} + +{$IFDEF FPC} {$ASMMODE intel} {$S-} {$ENDIF} + +function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; +// eax = x, edx = y, ecx = N +asm + // iters + imul ecx, -4; + + // helper registers for the mt1, mt2 and dest pointers + sub eax, ecx; + sub edx, ecx; + + {$IFDEF FPC}vxorpd ymm0, ymm0, ymm0;{$ELSE}db $C5,$FD,$57,$C0;{$ENDIF} + + // unrolled loop + @Loop1: + add ecx, 128; + jg @loopEnd1; + + {$IFDEF FPC}vmovupd ymm1, [eax + ecx - 128];{$ELSE}db $C5,$FD,$10,$4C,$08,$80;{$ENDIF} + {$IFDEF FPC}vmovupd ymm2, [edx + ecx - 128];{$ELSE}db $C5,$FD,$10,$54,$0A,$80;{$ENDIF} + {$IFDEF FPC}vmulps ymm1, ymm1, ymm2;{$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} + {$IFDEF FPC}vaddps ymm0, ymm0, ymm1;{$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} + + 
{$IFDEF FPC}vmovupd ymm3, [eax + ecx - 96];{$ELSE}db $C5,$FD,$10,$5C,$08,$A0;{$ENDIF} + {$IFDEF FPC}vmovupd ymm4, [edx + ecx - 96];{$ELSE}db $C5,$FD,$10,$64,$0A,$A0;{$ENDIF} + {$IFDEF FPC}vmulps ymm3, ymm3, ymm4;{$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} + {$IFDEF FPC}vaddps ymm0, ymm0, ymm3;{$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} + + {$IFDEF FPC}vmovupd ymm1, [eax + ecx - 64];{$ELSE}db $C5,$FD,$10,$4C,$08,$C0;{$ENDIF} + {$IFDEF FPC}vmovupd ymm2, [edx + ecx - 64];{$ELSE}db $C5,$FD,$10,$54,$0A,$C0;{$ENDIF} + {$IFDEF FPC}vmulps ymm1, ymm1, ymm2;{$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} + {$IFDEF FPC}vaddps ymm0, ymm0, ymm1;{$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} + + {$IFDEF FPC}vmovupd ymm3, [eax + ecx - 32];{$ELSE}db $C5,$FD,$10,$5C,$08,$E0;{$ENDIF} + {$IFDEF FPC}vmovupd ymm4, [edx + ecx - 32];{$ELSE}db $C5,$FD,$10,$64,$0A,$E0;{$ENDIF} + {$IFDEF FPC}vmulps ymm3, ymm3, ymm4;{$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} + {$IFDEF FPC}vaddps ymm0, ymm0, ymm3;{$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} + + jmp @Loop1; + + @loopEnd1: + + {$IFDEF FPC}vextractf128 xmm2, ymm0, 1;{$ELSE}db $C4,$E3,$7D,$19,$C2,$01;{$ENDIF} + {$IFDEF FPC}vhaddps xmm0, xmm0, xmm2;{$ELSE}db $C5,$FB,$7C,$C2;{$ENDIF} + + sub ecx, 128; + jz @loop2End; + + // loop to get all fitting into an array of 4 + @Loop2: + add ecx, 16; + jg @Loop2End; + + {$IFDEF FPC}vmovupd xmm3, [eax + ecx - 16];{$ELSE}db $C5,$F9,$10,$5C,$08,$F0;{$ENDIF} + {$IFDEF FPC}vmovupd xmm4, [edx + ecx - 16];{$ELSE}db $C5,$F9,$10,$64,$0A,$F0;{$ENDIF} + {$IFDEF FPC}vmulps xmm3, xmm3, xmm4;{$ELSE}db $C5,$E0,$59,$DC;{$ENDIF} + {$IFDEF FPC}vaddps xmm0, xmm0, xmm3;{$ELSE}db $C5,$F8,$58,$C3;{$ENDIF} + jmp @Loop2; + + @Loop2End: + + // handle last 2 elements + sub ecx, 16; + jz @loop3End; + + @loop3: + add ecx, 4; + jg @loop3End; + + {$IFDEF FPC}vmovss xmm3, [eax + ecx - 4];{$ELSE}db $C5,$FA,$10,$5C,$08,$FC;{$ENDIF} + {$IFDEF FPC}vmovss xmm4, [edx + ecx - 4];{$ELSE}db $C5,$FA,$10,$64,$0A,$FC;{$ENDIF} + {$IFDEF FPC}vmulss xmm3, xmm3, xmm4;{$ELSE}db $C5,$E2,$59,$DC;{$ENDIF} + {$IFDEF 
FPC}vaddss xmm0, xmm0, xmm3;{$ELSE}db $C5,$FA,$58,$C3;{$ENDIF} + + jmp @loop3; + @loop3End: + + // build result + {$IFDEF FPC}vhaddps xmm0, xmm0, xmm0;{$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF FPC}vhaddps xmm0, xmm0, xmm0;{$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF FPC}vzeroupper;{$ELSE}db $C5,$F8,$77;{$ENDIF} + movss Result, xmm0; +end; + +{$ENDIF} + +end. diff --git a/neural/Neural.AVXx64.pas b/neural/Neural.AVXx64.pas new file mode 100644 index 00000000..f4c1c870 --- /dev/null +++ b/neural/Neural.AVXx64.pas @@ -0,0 +1,120 @@ +unit Neural.AVXx64; + +// ########################################### +// #### 64 bit intel avx functions +// ########################################### + +interface + +{$IFDEF CPUX64} +{$DEFINE x64} +{$ENDIF} +{$IFDEF cpux86_64} +{$DEFINE x64} +{$ENDIF} +{$IFDEF x64} + +function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; {$IFDEF FPC}assembler;{$ENDIF} + +{$ENDIF} + +implementation + +{$IFDEF x64} + +{$IFDEF FPC} {$ASMMODE intel} {$S-} {$ENDIF} + +function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; +asm + {$IFDEF UNIX} + // Linux uses a diffrent ABI -> copy over the registers so they meet with winABI + // The parameters are passed in the following order: + // RDI, RSI, RDX, RCX, r8, r9 -> mov to RCX, RDX, R8, r9 + mov r8, rdx; + mov rdx, rsi; + mov rcx, rdi; + {$ENDIF} + + // iters + imul r8, -4; + + // helper registers for the mt1, mt2 and dest pointers + sub rcx, r8; + sub rdx, r8; + + {$IFDEF FPC}vxorpd ymm0, ymm0, ymm0;{$ELSE}db $C5,$FD,$57,$C0;{$ENDIF} + + // unrolled loop + @Loop1: + add r8, 128; + jg @loopEnd1; + + {$IFDEF FPC}vmovupd ymm1, [rcx + r8 - 128];{$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$80;{$ENDIF} + {$IFDEF FPC}vmovupd ymm2, [rdx + r8 - 128];{$ELSE}db $C4,$A1,$7D,$10,$54,$02,$80;{$ENDIF} + {$IFDEF FPC}vmulps ymm1, ymm1, ymm2;{$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} + {$IFDEF FPC}vaddps ymm0, ymm0, ymm1;{$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} + + {$IFDEF FPC}vmovupd ymm3, [rcx + r8 - 96];{$ELSE}db 
$C4,$A1,$7D,$10,$5C,$01,$A0;{$ENDIF} + {$IFDEF FPC}vmovupd ymm4, [rdx + r8 - 96];{$ELSE}db $C4,$A1,$7D,$10,$64,$02,$A0;{$ENDIF} + {$IFDEF FPC}vmulps ymm3, ymm3, ymm4;{$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} + {$IFDEF FPC}vaddps ymm0, ymm0, ymm3;{$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} + + {$IFDEF FPC}vmovupd ymm1, [rcx + r8 - 64];{$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$C0;{$ENDIF} + {$IFDEF FPC}vmovupd ymm2, [rdx + r8 - 64];{$ELSE}db $C4,$A1,$7D,$10,$54,$02,$C0;{$ENDIF} + {$IFDEF FPC}vmulps ymm1, ymm1, ymm2;{$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} + {$IFDEF FPC}vaddps ymm0, ymm0, ymm1;{$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} + + {$IFDEF FPC}vmovupd ymm3, [rcx + r8 - 32];{$ELSE}db $C4,$A1,$7D,$10,$5C,$01,$E0;{$ENDIF} + {$IFDEF FPC}vmovupd ymm4, [rdx + r8 - 32];{$ELSE}db $C4,$A1,$7D,$10,$64,$02,$E0;{$ENDIF} + {$IFDEF FPC}vmulps ymm3, ymm3, ymm4;{$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} + {$IFDEF FPC}vaddps ymm0, ymm0, ymm3;{$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} + + jmp @Loop1; + + @loopEnd1: + + {$IFDEF FPC}vextractf128 xmm2, ymm0, 1;{$ELSE}db $C4,$E3,$7D,$19,$C2,$01;{$ENDIF} + {$IFDEF FPC}vhaddps xmm0, xmm0, xmm2;{$ELSE}db $C5,$FB,$7C,$C2;{$ENDIF} + + sub r8, 128; + jz @loop2End; + + // loop to get all fitting into an array of 4 + @Loop2: + add r8, 16; + jg @Loop2End; + + {$IFDEF FPC}vmovupd xmm3, [rcx + r8 - 16];{$ELSE}db $C4,$A1,$79,$10,$5C,$01,$F0;{$ENDIF} + {$IFDEF FPC}vmovupd xmm4, [rdx + r8 - 16];{$ELSE}db $C4,$A1,$79,$10,$64,$02,$F0;{$ENDIF} + {$IFDEF FPC}vmulps xmm3, xmm3, xmm4;{$ELSE}db $C5,$E0,$59,$DC;{$ENDIF} + {$IFDEF FPC}vaddps xmm0, xmm0, xmm3;{$ELSE}db $C5,$F8,$58,$C3;{$ENDIF} + jmp @Loop2; + + @Loop2End: + + // handle last 2 elements + sub r8, 16; + jz @loop3End; + + @loop3: + add r8, 4; + jg @loop3End; + + {$IFDEF FPC}vmovss xmm3, [rcx + r8 - 4];{$ELSE}db $C4,$A1,$7A,$10,$5C,$01,$FC;{$ENDIF} + {$IFDEF FPC}vmovss xmm4, [rdx + r8 - 4];{$ELSE}db $C4,$A1,$7A,$10,$64,$02,$FC;{$ENDIF} + {$IFDEF FPC}vmulss xmm3, xmm3, xmm4;{$ELSE}db $C5,$E2,$59,$DC;{$ENDIF} + {$IFDEF FPC}vaddss xmm0, xmm0, 
xmm3;{$ELSE}db $C5,$FA,$58,$C3;{$ENDIF} + + jmp @loop3; + @loop3End: + + // build result + {$IFDEF FPC}vhaddps xmm0, xmm0, xmm0;{$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF FPC}vhaddps xmm0, xmm0, xmm0;{$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF FPC}vzeroupper;{$ELSE}db $C5,$F8,$77;{$ENDIF} + movss Result, xmm0; +end; + +{$ENDIF} + +end. diff --git a/neural/neuralabfun.pas b/neural/neuralabfun.pas index 07702375..18e31255 100644 --- a/neural/neuralabfun.pas +++ b/neural/neuralabfun.pas @@ -106,7 +106,7 @@ function CreateValidBinaryTest(Val1, Val2: byte; pOp1, pOp2: integer; RelOp1, Re // this array maps OpCode into its string representation const - csStrOp: array[0..csMaxOperations - 1] of string[15] = + csStrOp: array[0..csMaxOperations - 1] of string = //0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 ('nop', '=', '=', '<>', '>', '<', 'V', ' := ', 'inc', 'dec', '+', '-', '*', 'div', 'mod', //15 16 17 18 19 @@ -699,7 +699,7 @@ procedure TCreateValidOperations.Create(Tests, FullEqual: boolean; var ERRORS: array of byte); var LocalNonZeroPrevStates, NonZeroErrors: TPositionArray; - LocalNonZeroPrevStatesCount, NonZeroErrorsCount: integer; + LocalNonZeroPrevStatesCount(*, NonZeroErrorsCount*): integer; LocalNumberOfPreviousStates: integer; LocalPreviousStates: array of byte; OnAction: boolean; @@ -866,7 +866,7 @@ procedure TCreateValidOperations.Create(Tests, FullEqual: boolean; Clear; SetLength(NonZeroErrors,Self.NumberOfCurrentStates); NumberOfErrors := NumberOfNextStates; - NonZeroErrorsCount := getNonZeroElementsPos(NumberOfErrors, ERRORS, NonZeroErrors); + (*NonZeroErrorsCount := *)getNonZeroElementsPos(NumberOfErrors, ERRORS, NonZeroErrors); if not(FCS.TestOnStates) then RunOnActionFlag := 1 else if not(FCS.TestOnActions) then RunOnActionFlag := 0 @@ -1012,6 +1012,7 @@ function TRunOperation.LocalTestTests(var Tests: TTestsClass): integer; efeito: byte; PermissibleErrors: ShortInt; begin + PermissibleErrors := 0; if Tests.N > 0 then begin PermissibleErrors := Tests.N - 
Tests.TestThreshold; diff --git a/neural/neuralbit.pas b/neural/neuralbit.pas index d2a1b309..6199ee47 100644 --- a/neural/neuralbit.pas +++ b/neural/neuralbit.pas @@ -925,4 +925,4 @@ procedure Clear(var VARS: array of extended); VARS[Cont] := 0; end; -end. { of unit } +end. diff --git a/neural/neuralbyteprediction.pas b/neural/neuralbyteprediction.pas index 6da031f0..73ccd061 100644 --- a/neural/neuralbyteprediction.pas +++ b/neural/neuralbyteprediction.pas @@ -258,6 +258,7 @@ interface // This function returns the probability to win of a given neuron from position pos. function ProbToWin(neuronPos: longint): extended; + public // This function returns all relation indexes with a minimun number of victories (selections) // and a minimum probability MinF. procedure SelectBestIndexes(MinimumNumberOfVictories: longint; MinF: extended); @@ -371,6 +372,7 @@ function TClassifier.AddState(pLabel: integer; pState: array of byte): integer; CurrentState[0] := 0; NextState[0] := pLabel; FStates[FNextFreePos].FTester.Load(FCS, pState, CurrentState, NextState); + Result := FNextFreePos; Inc(FNextFreePos); end; @@ -485,7 +487,7 @@ function TClassifier.MutateNeuronGroup(NG: TNeuronGroup): TNeuronGroup; if (NG.TestNeuronLayer.N > 10) then begin NG.TestNeuronLayer.TestThreshold := - NG.TestNeuronLayer.N - Random(NG.TestNeuronLayer.N div 10); + NG.TestNeuronLayer.N - Random(Integer(NG.TestNeuronLayer.N div 10)); end else begin @@ -718,10 +720,11 @@ procedure TEasyLearnAndPredictClass.Predict(var pActions, pCurrentState: array o begin ABCopy(aActions, pActions); ABCopy(aCurrentState, pCurrentState); + idxCache := -1; if FUseCache then idxCache := FCache.Read(pActions, pPredictedState); Equal := ABCmp(pActions, pCurrentState); - if FUseCache and (idxCache <> -1) and Equal then + if (idxCache <> -1) and Equal then begin FCached := True; end @@ -866,28 +869,37 @@ procedure TStatePredictionClass.AddNeuronsFromStringFromPos(var str: string; pos pStatelen: integer; inputNeuronCnt: 
integer; begin - version := 1; S := TStringList.Create; - S.Sorted := false; - S.Delimiter := chr(10); - S.StrictDelimiter := true; - S.DelimitedText := str; - - version := StrToInt(S[0]); - evaluation := StrToInt(S[1]); - pActionLen := StrToInt(S[2]); - pStatelen := StrToInt(S[3]); - - //TODO: treat above info here. - - if (S.Count>4) then - begin - neuronPos := pos; - for inputNeuronCnt := 4 to S.Count-1 do - begin - FNN[neuronPos].LoadFromString(S[inputNeuronCnt]); - inc(neuronPos); - end; + try + S.Sorted := false; + S.Delimiter := chr(10); + S.StrictDelimiter := true; + S.DelimitedText := str; + + version := StrToInt(S[0]); + evaluation := StrToInt(S[1]); + pActionLen := StrToInt(S[2]); + pStatelen := StrToInt(S[3]); + + if version <> 1 then + raise Exception.Create( 'Version V' + IntToStr( version ) + '.' + IntToStr(evaluation) + ' found but V1.0 expected'); + + if pActionLen <> FActionByteLen then + raise Exception.Create('Action length differs'); + if pStateLen <> FStateByteLen then + raise Exception.Create('State length differs'); + + if (S.Count>4) then + begin + neuronPos := pos; + for inputNeuronCnt := 4 to S.Count-1 do + begin + FNN[neuronPos].LoadFromString(S[inputNeuronCnt]); + inc(neuronPos); + end; + end; + finally + S.Free; end; end; @@ -1057,11 +1069,11 @@ function TStatePredictionClass.GetBestNeuronIndex(var posBest: longint; if (Actual > Best) then begin - Best := actual; + // Best := actual; // this is due to the exit... 
posBest := neuronPos; R := True; Result := R; - exit; + exit; // end; end; end; diff --git a/neural/neuraldatasets.pas b/neural/neuraldatasets.pas index 0ac9a79f..ee561449 100644 --- a/neural/neuraldatasets.pas +++ b/neural/neuraldatasets.pas @@ -27,7 +27,7 @@ interface uses - {$IFNDEF FPC}System.Classes,{$ENDIF} + {$IFNDEF FPC} {$IF (CompilerVersion <= 21)} Classes, {$ELSE} System.Classes, {$IFEND} {$ENDIF} neuralvolume, neuralnetwork {$IFDEF FPC}, FPimage, FPReadBMP, FPReadPCX, FPReadJPEG, FPReadPNG, @@ -277,7 +277,7 @@ implementation uses SysUtils, math, neuralthread, - {$IFDEF FPC}fileutil{$ELSE} Winapi.Windows{$ENDIF}; + {$IFDEF FPC}fileutil{$ELSE} Windows{$ENDIF}; {$IFDEF FPC} procedure CreateVolumesFromImagesFromFolder(out ImgTrainingVolumes, ImgValidationVolumes, @@ -822,12 +822,13 @@ function SwapEndian(I:integer):integer; {$ENDIF} procedure TranslateCifar10VolumesToMachineAnimal(VolumeList: TNNetVolumeList); -var - Volume: TNNetVolume; +var i : integer; + Volume: TNNetVolume; begin - for Volume in VolumeList do + for i := 0 to VolumeList.Count - 1 do begin - Volume.Tag := csMachineAnimalCifar10Pos[Volume.Tag]; + volume := volumeList[i]; + Volume.Tag := csMachineAnimalCifar10Pos[Volume.Tag]; end; end; diff --git a/neural/neuralevolutionary.pas b/neural/neuralevolutionary.pas index 84b2c803..64344fc0 100644 --- a/neural/neuralevolutionary.pas +++ b/neural/neuralevolutionary.pas @@ -85,7 +85,6 @@ TEvolutionary = class(TObject) function Evolve(Element: T; RunCnt: integer): T; constructor Create(pAlwaysGetBest: boolean = False; pKidsPerFather: integer = 10); destructor Destroy; override; - published property AlwaysGetBest: boolean read FAlwaysGetBest write FAlwaysGetBest; property KidsPerFather: integer read FKidsPerFather write FKidsPerFather; property LastEval: double read FLastEval; diff --git a/neural/neuralfit.pas b/neural/neuralfit.pas index 588aac91..ae12acb0 100644 --- a/neural/neuralfit.pas +++ b/neural/neuralfit.pas @@ -94,7 +94,7 @@ TNeuralFitBase = 
class(TMObject) FProcs: TNeuralThreadList; procedure CheckLearningRate(iEpochCount: integer); public - constructor Create(); override; + constructor Create(); destructor Destroy(); override; procedure WaitUntilFinished; {$IFDEF OpenCL} @@ -119,6 +119,7 @@ TNeuralFitBase = class(TMObject) property InitialEpoch: integer read FInitialEpoch write FInitialEpoch; property InitialLearningRate: single read FInitialLearningRate write FInitialLearningRate; property LearningRateDecay: single read FLearningRateDecay write FLearningRateDecay; + property MinLearnRate : single read FMinLearnRate write FMinLearnRate; property LoadBestAtEnd: boolean read FLoadBestAdEnd write FLoadBestAdEnd; property L2Decay: single read FL2Decay write FL2Decay; property MaxThreadNum: integer read FMaxThreadNum write FMaxThreadNum; @@ -152,7 +153,7 @@ TNeuralFitWithImageBase = class(TNeuralFitBase) FColorEncoding: integer; FChannelShiftRate: TNeuralFloat; public - constructor Create(); override; + constructor Create(); destructor Destroy(); override; procedure ClassifyImage(pNN: TNNet; pImgInput, pOutput: TNNetVolume); procedure EnableDefaultImageTreatment(); virtual; @@ -187,7 +188,7 @@ TNeuralDataLoadingFit = class(TNeuralFitWithImageBase) FGetTrainingProc, FGetValidationProc, FGetTestProc: TNNetGet2VolumesProc; function DefaultLossFn(ExpectedOutput, FoundOutput: TNNetVolume; ThreadId: integer): TNeuralFloat; public - constructor Create(); override; + constructor Create(); procedure FitLoading(pNN: TNNet; TrainingCnt, ValidationCnt, TestCnt, pBatchSize, Epochs: integer; pGetTrainingPair, pGetValidationPair, pGetTestPair: TNNetGetPairFn); overload; @@ -231,7 +232,7 @@ TNeuralFit = class(TNeuralDataLoadingFit) function FitValidationPair(Idx: integer; ThreadId: integer): TNNetVolumePair; function FitTestPair(Idx: integer; ThreadId: integer): TNNetVolumePair; public - constructor Create(); override; + constructor Create(); destructor Destroy(); override; procedure Fit(pNN: TNNet; @@ -280,7 +281,7 @@ 
TNeuralImageFit = class(TNeuralFitWithImageBase) FIsSoftmax: boolean; FTrainingSampleProcessedCnt: TNNetVolume; public - constructor Create(); override; + constructor Create(); destructor Destroy(); override; procedure Fit(pNN: TNNet; @@ -602,7 +603,7 @@ procedure TNeuralDataLoadingFit.FitLoading(pNN: TNNet; TrainingCnt, ' Inertia:' + FloatToStrF(FInertia,ffFixed,8,6) + ' Batch size:' + IntToStr(FBatchSize) + ' Step size:' + IntToStr(FStepSize) + - ' Staircase ephocs:' + IntToStr(FStaircaseEpochs) + ' Staircase epochs:' + IntToStr(FStaircaseEpochs) ); if TrainingCnt > 0 then MessageProc('Training volumes: '+IntToStr(TrainingCnt)); if ValidationCnt > 0 then MessageProc('Validation volumes: '+IntToStr(ValidationCnt)); @@ -1370,6 +1371,7 @@ procedure TNeuralDataLoadingFit.RunTrainingBatch(); if FClipDelta > 0 then begin MaxDelta := FNN.ForceMaxAbsoluteDelta(FClipDelta); + MessageProc('Deltas have maxed to: '+FloatToStr(MaxDelta)); end else begin @@ -1749,7 +1751,7 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; ' Inertia:' + FloatToStrF(FInertia,ffFixed,8,6) + ' Batch size:' + IntToStr(FBatchSize) + ' Step size:' + IntToStr(FStepSize) + - ' Staircase ephocs:' + IntToStr(FStaircaseEpochs) + + ' Staircase epochs:' + IntToStr(FStaircaseEpochs) + ' Min backprop error:' + FloatToStrF(MinBackpropagationError,ffFixed,4,2) ); if Assigned(FImgVolumes) then MessageProc('Training images: '+IntToStr(FImgVolumes.Count)); @@ -1796,6 +1798,7 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; if FClipDelta > 0 then begin MaxDelta := FNN.ForceMaxAbsoluteDelta(FClipDelta); + MessageProc('Deltas have maxed to: '+FloatToStr(MaxDelta)); end else begin diff --git a/neural/neuralnetwork.pas b/neural/neuralnetwork.pas index 960f86f0..dbb81a69 100644 --- a/neural/neuralnetwork.pas +++ b/neural/neuralnetwork.pas @@ -102,10 +102,10 @@ TNNetNeuron = class (TMObject) FBiasWeight: TNeuralFloat; FBiasInertia: TNeuralFloat; FBiasDelta: TNeuralFloat; - public - constructor Create(); override; + constructor 
Create(); destructor Destroy(); override; + procedure Assign( neuron : TNNetNeuron ); procedure Fill(Value:TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} procedure AddInertia(); {$IFDEF Release} inline; {$ENDIF} procedure UpdateWeights(Inertia:TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} @@ -197,8 +197,10 @@ TNNetLayer = class(TMObject) procedure ApplyActivationFunctionToOutput(); virtual; procedure BuildArrNeurons(); procedure AfterWeightUpdate(); virtual; + protected + procedure Assign( layer : TNNetLayer ); public - constructor Create(); override; + constructor Create(); virtual; destructor Destroy(); override; {$IFDEF OpenCL} @@ -247,7 +249,7 @@ TNNetLayer = class(TMObject) procedure ForcePositiveWeights(); {$IFDEF Release} inline; {$ENDIF} procedure NormalizeWeights(VMax: TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} function SaveDataToString(): string; virtual; - procedure LoadDataFromString(strData: string); virtual; + procedure LoadDataFromString(strData: string); function SaveStructureToString(): string; virtual; procedure SetBatchUpdate(pBatchUpdate: boolean); {$IFDEF Release} inline; {$ENDIF} procedure UpdateWeights(); {$IFDEF Release} inline; {$ENDIF} @@ -338,9 +340,9 @@ TNNetInputBase = class(TNNetLayer) // need to backpropagate errors up to the input, call EnableErrorCollection. 
TNNetInput = class(TNNetInputBase) public - constructor Create(pSize: integer); overload; - constructor Create(pSizeX, pSizeY, pDepth: integer); overload; - constructor Create(pSizeX, pSizeY, pDepth, pError: integer); overload; + constructor Create(pSize: integer); reintroduce; overload; + constructor Create(pSizeX, pSizeY, pDepth: integer); reintroduce; overload; + constructor Create(pSizeX, pSizeY, pDepth, pError: integer); reintroduce; overload; function EnableErrorCollection: TNNetInput; function DisableErrorCollection: TNNetInput; @@ -359,7 +361,7 @@ TNNetIdentity = class(TNNetLayer) /// This layer allows you to debug activation and backpropagation of an TNNetDebug = class(TNNetIdentity) public - constructor Create(hasForward, hasBackward: integer); overload; + constructor Create(hasForward, hasBackward: integer); reintroduce; overload; procedure Compute(); override; procedure Backpropagate(); override; end; @@ -372,7 +374,7 @@ TNNetPad = class(TNNetLayer) FPadding: integer; procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; public - constructor Create(Padding: integer); overload; + constructor Create(Padding: integer); reintroduce; overload; procedure Compute(); override; procedure Backpropagate(); override; end; @@ -407,7 +409,7 @@ TNNetDigital = class(TNNetIdentity) FLowValue, FHighValue: TNeuralFloat; FMiddleDist: TNeuralFloat; public - constructor Create(LowValue, HighValue: integer); overload; + constructor Create(LowValue, HighValue: integer); reintroduce; overload; procedure Compute(); override; procedure Backpropagate(); override; end; @@ -425,7 +427,7 @@ TNNetReLUL = class(TNNetReLUBase) private FScale, FLowLimit, FHighLimit: TNeuralFloat; public - constructor Create(LowLimit, HighLimit, Leakiness: integer); overload; + constructor Create(LowLimit, HighLimit, Leakiness: integer); reintroduce; overload; procedure Compute(); override; end; @@ -474,7 +476,7 @@ TNNetPower = class(TNNetReLUBase) private FPower: TNeuralFloat; public - constructor 
Create(iPower: integer); overload; + constructor Create(iPower: integer); reintroduce; overload; procedure Compute(); override; end; @@ -516,7 +518,7 @@ TNNetHyperbolicTangent = class(TNNetSigmoid) // learning but can also provoke overflows. TNNetMulLearning = class(TNNetIdentity) public - constructor Create(pMul: integer); overload; + constructor Create(pMul: integer); reintroduce; overload; procedure Backpropagate(); override; end; @@ -536,7 +538,7 @@ TNNetNegate = class(TNNetMulByConstant) /// This is an experimental layer. Do not use it. TNNetAddAndDiv = class(TNNetIdentity) public - constructor Create(pAdd, pDiv: integer); overload; + constructor Create(pAdd, pDiv: integer); reintroduce; overload; procedure Compute(); override; end; @@ -556,7 +558,7 @@ TNNetDropout = class(TNNetAddNoiseBase) private procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; public - constructor Create(Rate: double; OneMaskPerbatch: integer = 1); overload; + constructor Create(Rate: double; OneMaskPerbatch: integer = 1); reintroduce; overload; destructor Destroy(); override; procedure Compute(); override; procedure Backpropagate(); override; @@ -573,7 +575,7 @@ TNNetRandomMulAdd = class(TNNetAddNoiseBase) protected FRandomBias, FRandomMul: TNeuralFloat; public - constructor Create(AddRate, MulRate: integer); overload; + constructor Create(AddRate, MulRate: integer); reintroduce; overload; procedure Compute(); override; procedure Backpropagate(); override; end; @@ -584,7 +586,7 @@ TNNetChannelRandomMulAdd = class(TNNetAddNoiseBase) protected FRandomBias, FRandomMul: TNNetVolume; public - constructor Create(AddRate, MulRate: integer); overload; + constructor Create(AddRate, MulRate: integer); reintroduce; overload; destructor Destroy; override; procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; procedure Compute(); override; @@ -668,8 +670,8 @@ TNNetChannelMulByLayer = class(TNNetChannelTransformBase) FLayerWithChannels, FLayerMul: TNNetLayer; procedure 
SetPrevLayer(pPrevLayer: TNNetLayer); override; public - constructor Create(LayerWithChannels, LayerMul: TNNetLayer); overload; - constructor Create(LayerWithChannelsIdx, LayerMulIdx: integer); overload; + constructor Create(LayerWithChannels, LayerMul: TNNetLayer); reintroduce; overload; + constructor Create(LayerWithChannelsIdx, LayerMulIdx: integer); reintroduce; overload; procedure Compute(); override; procedure Backpropagate(); override; end; @@ -682,8 +684,8 @@ TNNetCellMulByCell = class(TNNetChannelTransformBase) FLayerA, FLayerB: TNNetLayer; procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; public - constructor Create(LayerA, LayerB: TNNetLayer); overload; - constructor Create(LayerAIdx, LayerBIdx: integer); overload; + constructor Create(LayerA, LayerB: TNNetLayer); reintroduce; overload; + constructor Create(LayerAIdx, LayerBIdx: integer); reintroduce; overload; procedure Compute(); override; procedure Backpropagate(); override; end; @@ -742,7 +744,7 @@ TNNetLocalResponseNorm2D = class(TNNetIdentity) private FLRN: TNNetVolume; public - constructor Create(pSize: integer); overload; + constructor Create(pSize: integer); reintroduce; overload; destructor Destroy(); override; procedure Compute(); override; @@ -755,7 +757,7 @@ TNNetInterleaveChannels = class(TNNetIdentity) ToChannels: TNeuralIntegerArray; procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; public - constructor Create(StepSize: integer); overload; + constructor Create(StepSize: integer); reintroduce; overload; destructor Destroy(); override; procedure Compute(); override; @@ -774,7 +776,7 @@ TNNetReshape = class(TNNetLayer) private procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; public - constructor Create(pSizeX, pSizeY, pDepth: integer); overload; + constructor Create(pSizeX, pSizeY, pDepth: integer); reintroduce; overload; procedure Compute(); override; procedure Backpropagate(); override; @@ -800,8 +802,8 @@ TNNetConcatBase = class(TNNetLayer) // XY size. 
TNNetConcat = class(TNNetConcatBase) public - constructor Create(pSizeX, pSizeY, pDepth: integer; aL: array of TNNetLayer); overload; - constructor Create(aL: array of TNNetLayer); overload; + constructor Create(pSizeX, pSizeY, pDepth: integer; aL: array of TNNetLayer); reintroduce; overload; + constructor Create(aL: array of TNNetLayer); reintroduce; overload; procedure Compute(); override; procedure Backpropagate(); override; @@ -815,7 +817,7 @@ TNNetDeepConcat = class(TNNetConcatBase) FDeepsChannel: TNeuralIntegerArray; FRemainingChannels: TNeuralIntegerArray; public - constructor Create(aL: array of TNNetLayer); overload; + constructor Create(aL: array of TNNetLayer); reintroduce; overload; destructor Destroy(); override; procedure Compute(); override; @@ -825,7 +827,7 @@ TNNetDeepConcat = class(TNNetConcatBase) /// This layer sums layers of same size allowing resnet style layers. TNNetSum = class(TNNetConcatBase) public - constructor Create(aL: array of TNNetLayer); overload; + constructor Create(aL: array of TNNetLayer); reintroduce; overload; destructor Destroy(); override; procedure Compute(); override; @@ -838,8 +840,8 @@ TNNetSplitChannels = class(TNNetLayer) FChannels: TNeuralIntegerArray; procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; public - constructor Create(ChannelStart, ChannelLen: integer); overload; - constructor Create(pChannels: array of integer); overload; + constructor Create(ChannelStart, ChannelLen: integer); reintroduce; overload; + constructor Create(pChannels: array of integer); reintroduce; overload; destructor Destroy(); override; procedure Compute(); override; @@ -863,8 +865,8 @@ TNNetFullConnect = class(TNNetLayerConcatedWeights) procedure ComputePreviousLayerError(); override; procedure ComputePreviousLayerErrorCPU(); virtual; public - constructor Create(pSizeX, pSizeY, pDepth: integer; pSuppressBias: integer = 0); overload; virtual; - constructor Create(pSize:integer; pSuppressBias: integer = 0); overload; + constructor 
Create(pSizeX, pSizeY, pDepth: integer; pSuppressBias: integer = 0); reintroduce; overload; virtual; + constructor Create(pSize:integer; pSuppressBias: integer = 0); reintroduce; overload; procedure Compute(); override; procedure ComputeCPU(); virtual; procedure Backpropagate(); override; @@ -953,14 +955,14 @@ TNNetConvolutionAbstract = class(TNNetLayerConcatedWeights) procedure RefreshCalculatePrevLayerError(); procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; public - constructor Create(pFeatureSize, pInputPadding, pStride: integer; pSuppressBias: integer = 0); overload; + constructor Create(pFeatureSize, pInputPadding, pStride: integer; pSuppressBias: integer = 0); reintroduce; overload; destructor Destroy(); override; procedure InitDefault(); override; end; /// This class does a depthwise convolution. TNNetDepthwiseConv = class(TNNetConvolutionAbstract) - private + protected procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; procedure BackpropagateCPU(); {$IFDEF Release} inline; {$ENDIF} procedure BackpropagateCPUFast(); @@ -987,6 +989,12 @@ TNNetDepthwiseConvReLU = class(TNNetDepthwiseConv) constructor Create(pMultiplier, pFeatureSize, pInputPadding, pStride: integer); override; end; + TNNetDepthwiseConvGeLU = class(TNNetDepthwiseConv) + public + constructor Create(pMultiplier, pFeatureSize, pInputPadding, pStride: integer); override; + end; + + /// This is a base class. Do not use it directly. 
TNNetConvolutionBase = class(TNNetConvolutionAbstract) private @@ -998,6 +1006,7 @@ TNNetConvolutionBase = class(TNNetConvolutionAbstract) FMaxTileX, FMaxTileD: integer; FTileSizeX, FTileSizeD: integer; + protected {$IFDEF Debug} procedure PrepareInputForConvolution(); overload; {$IFDEF Release} inline; {$ENDIF} procedure PrepareInputForConvolution(OutputX, OutputY: integer); overload; {$IFDEF Release} inline; {$ENDIF} @@ -1026,6 +1035,7 @@ TNNetGroupedConvolutionLinear = class(TNNetConvolutionBase) procedure PrepareInputForGroupedConvolutionFast(); procedure ComputeCPU(); procedure BackpropagateCPU(); + protected procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; public constructor Create(pNumFeatures, pFeatureSize, pInputPadding, pStride, pGroups: integer; pSuppressBias: integer = 0); overload; virtual; @@ -1059,7 +1069,6 @@ TNNetGroupedPointwiseConvReLU = class(TNNetGroupedPointwiseConvLinear) TNNetConvolution = class(TNNetConvolutionBase) protected procedure BackpropagateAtOutputPos(pCanBackpropOnPos: boolean; OutputRawPos, OutputX, OutputY, OutputD, PrevX, PrevY: integer); {$IFDEF Release} inline; {$ENDIF} - private procedure ComputeCPU(); procedure ComputeTiledCPU(); procedure ComputeInterleaved(); @@ -1068,6 +1077,7 @@ TNNetConvolution = class(TNNetConvolutionBase) procedure BackpropagateFastTiledCPU(); procedure BackpropagateFastCPUDev(); // Backprop CPU development version (do not use it) + protected {$IFDEF OpenCL} procedure ComputeOpenCL(); {$ENDIF} @@ -1108,6 +1118,21 @@ TNNetConvolutionReLU = class(TNNetConvolution) constructor Create(pNumFeatures, pFeatureSize, pInputPadding, pStride: integer; pSuppressBias: integer = 0); override; end; + // convolutional layer with GeLU activation function + TNNetConvolutionGeLU = class(TNNetConvolution) + public + constructor Create(pNumFeatures, pFeatureSize, pInputPadding, pStride: integer; pSuppressBias: integer = 0); override; + end; + + TNNetConvolutionSwish6 = class(TNNetConvolution) + public + 
constructor Create(pNumFeatures, pFeatureSize, pInputPadding, pStride: integer; pSuppressBias: integer = 0); override; + end; + TNNetConvolutionSwish = class(TNNetConvolution) + public + constructor Create(pNumFeatures, pFeatureSize, pInputPadding, pStride: integer; pSuppressBias: integer = 0); override; + end; + /// Pointwise convolution with tanh activation. TNNetPointwiseConv = class(TNNetConvolution) public @@ -1140,7 +1165,7 @@ TNNetDeconvolutionReLU = class(TNNetConvolutionReLU) { TNNetLocalConnect } TNNetLocalConnect = class(TNNetConvolutionBase) - private + protected procedure BackpropagateAtOutputPos(OutputX, OutputY, OutputD: integer); {$IFDEF Release} inline; {$ENDIF} procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; public @@ -1153,7 +1178,7 @@ TNNetLocalConnect = class(TNNetConvolutionBase) { TNNetLocalProduct } // This is an experimental layer. Do not use it yet. TNNetLocalProduct = class(TNNetConvolutionBase) - private + protected procedure BackpropagateAtOutputPos(OutputX, OutputY, OutputD: integer); {$IFDEF Release} inline; {$ENDIF} procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; public @@ -1196,7 +1221,7 @@ TNNetPoolBase = class(TNNetLayer) procedure BackpropagateWithStride(); procedure ComputePreviousLayerError(); override; public - constructor Create(pPoolSize: integer; pStride:integer = 0; pPadding: integer = 0); overload; + constructor Create(pPoolSize: integer; pStride:integer = 0; pPadding: integer = 0); reintroduce; overload; virtual; destructor Destroy(); override; procedure Backpropagate(); override; end; @@ -1287,6 +1312,7 @@ TNNetUpsample = class(TNNetDeMaxPool) TNNetDeAvgPool = class(TNNetDeMaxPool); /// neural network + TTNNetProgress = procedure(Sender : TObject; numCalc, numTotal : integer; var cancel : boolean ) of Object; TNNet = class(TMObject) protected FLayers: TNNetLayerList; @@ -1298,8 +1324,11 @@ TNNet = class(TMObject) {$IFDEF OpenCL} FDotProductKernel: TDotProductKernel; {$ENDIF} + fProgress : 
TTNNetProgress; public - constructor Create(); override; + property OnProgress : TTNNetProgress read fProgress write fProgress; // called in Compute! + public + constructor Create(); destructor Destroy(); override; function CreateLayer(strData: string): TNNetLayer; @@ -1481,7 +1510,6 @@ TNNet = class(TMObject) // custom layers support function ShouldIncDepartingBranchesCnt(pLayer: TNNetLayer):boolean; virtual; - published property BackwardTime: double read FBackwardTime write FBackwardTime; property ForwardTime: double read FForwardTime write FForwardTime; property Layers: TNNetLayerList read FLayers; @@ -1622,7 +1650,7 @@ TNNetByteProcessing = class(TNNetIdentity) FActionBytes: array of byte; procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; public - constructor Create(CacheSize, TestCount, OperationCount: integer); overload; + constructor Create(CacheSize, TestCount, OperationCount: integer); reintroduce; overload; destructor Destroy; override; procedure Compute(); override; procedure Backpropagate(); override; @@ -1633,7 +1661,7 @@ TNNetForByteProcessing = class(TNNet) private FInput, FOutput: TNNetVolume; public - constructor Create(); override; + constructor Create(); destructor Destroy(); override; procedure AddBasicByteProcessingLayers(InputByteCount, OutputByteCount: integer; @@ -2098,7 +2126,7 @@ procedure TNNetGroupedConvolutionLinear.PrepareInputForGroupedConvolutionFast(); ChannelsPerGroup, ChannelsPerGroupSize: integer; yCount, xCount, groupCount: integer; InputX, InputY: integer; - RowSize: integer; + //RowSize: integer; FeatureSizeXYD: integer; {$IFDEF AVXANY} SourceRawPos, DestRawPos: pointer; @@ -2111,7 +2139,7 @@ procedure TNNetGroupedConvolutionLinear.PrepareInputForGroupedConvolutionFast(); else begin ChannelsPerGroup := FInputCopy.Depth div FStruct[5]; - RowSize := ChannelsPerGroup; + //RowSize := ChannelsPerGroup; ChannelsPerGroupSize := ChannelsPerGroup * SizeOf(TNeuralFloat); MaxX := FOutput.SizeX - 1; MaxY := FOutput.SizeY - 1; @@ 
-2178,7 +2206,7 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); var OutputX, OutputY, OutputD: integer; MaxX, MaxY, MaxD: integer; - GroupId, GroupDSize, GroupDStart: integer; + (*GroupId, *)GroupDSize, GroupDStart: integer; PrevX, PrevY: integer; OutputRawPos: integer; CanBackpropOnPos: boolean; @@ -2192,7 +2220,9 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); //PrevNumElements: integer; MissedElements: integer; //, PrevMissedElements: integer; + {$IFDEF AVX64} PtrNeuronDelta: TNeuralFloatArrPtr; + {$ENDIF} PtrPreparedInput: TNeuralFloatArrPtr; //PrevPtrA, PrevPtrB: TNeuralFloatArrPtr; NeuronWeights: integer; @@ -2205,6 +2235,7 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); MaxX := OutputError.SizeX - 1; MaxY := OutputError.SizeY - 1; MaxD := OutputError.Depth - 1; + LocalDestPtr := nil; // Debug code: FOutputError.ForceMaxAbs(1); GroupDSize := OutputError.Depth div FStruct[5]; LocalPrevError := FPrevLayer.OutputError; @@ -2238,7 +2269,7 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); OutputRawPos := FOutputErrorDeriv.GetRawPos(OutputX, OutputY, StartTileD); for OutputD := StartTileD to EndTileD do begin - GroupId := FArrGroupId[OutputD]; + //GroupId := FArrGroupId[OutputD]; GroupDStart := FArrGroupIdStart[OutputD]; if (FCalculatePrevLayerError and CanBackpropOnPos) then LocalDestPtr := LocalPrevError.GetRawPtr(PrevX, PrevY, GroupDStart); @@ -2976,25 +3007,23 @@ procedure TNNetLocalProduct.Compute(); procedure TNNetLocalProduct.ComputeCPU(); var OutputCntX, OutputCntY, OutputCntD: integer; - InputCntX, InputCntY: integer; + //InputCntX, InputCntY: integer; MaxX, MaxY, MaxD: integer; LocalSize: integer; PtrA: TNeuralFloatArrPtr; OutputIdx: integer; Product: TNeuralFloat; - CntXYD: integer; begin MaxX := FOutput.SizeX - 1; MaxY := FOutput.SizeY - 1; MaxD := FOutput.Depth - 1; LocalSize := FFeatureSizeX*FFeatureSizeY*FInputCopy.Depth; - InputCntX := 0; + // InputCntX := 0; OutputCntX := 0; - CntXYD := 0; 
while OutputCntX <= MaxX do begin - InputCntY := 0; + //InputCntY := 0; OutputCntY := 0; while OutputCntY <= MaxY do begin @@ -3009,12 +3038,11 @@ procedure TNNetLocalProduct.ComputeCPU(); FOutputRaw.FData[OutputIdx] := Product; FOutput.FData[OutputIdx] := Product; Inc(OutputCntD); - Inc(CntXYD); end; - Inc(InputCntY, FStride); + // Inc(InputCntY, FStride); Inc(OutputCntY); end; - Inc(InputCntX, FStride); + // Inc(InputCntX, FStride); Inc(OutputCntX); end; (* @@ -3616,6 +3644,15 @@ constructor TNNetDepthwiseConvReLU.Create(pMultiplier, pFeatureSize, FActivationFnDerivative := @RectifiedLinearUnitDerivative; end; +{ TNNetDepthwiseConvGeLU } +constructor TNNetDepthwiseConvGeLU.Create(pMultiplier, pFeatureSize, + pInputPadding, pStride: integer); +begin + inherited Create(pMultiplier, pFeatureSize, pInputPadding, pStride); + FActivationFn := @GaussErrorLinUnit; + FActivationFnDerivative := @GaussErrorLinUnitDerivative; +end; + { TNNetDepthwiseConvLinear } constructor TNNetDepthwiseConvLinear.Create(pMultiplier, pFeatureSize, pInputPadding, pStride: integer); @@ -4893,7 +4930,13 @@ procedure TNNetLayerConcatedWeights.RefreshNeuronWeightList(); procedure TNNetConvolutionBase.EnableOpenCL(DotProductKernel: TDotProductKernel); begin inherited EnableOpenCL(DotProductKernel); - FDotCL.PrepareForCompute(FConcatedWInter, FInputPrepared, FVectorSize); + + // fDotCL is not assigned in case fShouldOpenCL is false + if Assigned(FDotCL) + then + FDotCL.PrepareForCompute(FConcatedWInter, FInputPrepared, FVectorSize) + else + FHasOpenCL := False; end; procedure TNNetLayerConcatedWeights.EnableOpenCL( @@ -6492,7 +6535,6 @@ procedure TestConvolutionAPI(); NN: THistoricalNets; NN2: TNNet; AuxVolume: TNNetVolume; - I: integer; begin NN := THistoricalNets.Create(); AuxVolume := TNNetVolume.Create; @@ -7334,7 +7376,7 @@ procedure TNNetDeMaxPool.ComputePreviousLayerError(); RawPos, PrevRawPos: integer; PrevPosX, PrevPosY: integer; floatPoolSize: TNeuralFloat; - OutX, OutY: integer; +// 
OutX, OutY: integer; begin MaxD := Output.Depth - 1; @@ -7525,13 +7567,13 @@ procedure TNNetReshape.Compute; procedure TNNetReshape.Backpropagate; var - Len: integer; +// Len: integer; StartTime: double; begin StartTime := Now(); Inc(FBackPropCallCurrentCnt); if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; - Len := Min(FOutput.Size, FPrevLayer.FOutput.Size); +// Len := Min(FOutput.Size, FPrevLayer.FOutput.Size); //TODO: check this for possible crash. FPrevLayer.FOutputError.Add(FOutputError); FBackwardTime := FBackwardTime + (Now() - StartTime); @@ -7683,26 +7725,21 @@ procedure TNNetLocalConnect.Compute(); procedure TNNetLocalConnect.ComputeCPU(); var OutputCntX, OutputCntY, OutputCntD: integer; - InputCntX, InputCntY: integer; MaxX, MaxY, MaxD: integer; LocalSize: integer; LocalW: TNNetVolume; PtrA, PtrB: TNeuralFloatArrPtr; NeuronIdx: integer; Sum: TNeuralFloat; - CntXYD: integer; begin MaxX := FOutput.SizeX - 1; MaxY := FOutput.SizeY - 1; MaxD := FOutput.Depth - 1; LocalSize := FFeatureSizeX*FFeatureSizeY*FInputCopy.Depth; - InputCntX := 0; OutputCntX := 0; - CntXYD := 0; while OutputCntX <= MaxX do begin - InputCntY := 0; OutputCntY := 0; while OutputCntY <= MaxY do begin @@ -7720,12 +7757,9 @@ procedure TNNetLocalConnect.ComputeCPU(); FOutputRaw.FData[NeuronIdx] := Sum; FOutput.FData[NeuronIdx] := FActivationFn(Sum); Inc(OutputCntD); - Inc(CntXYD); end; - Inc(InputCntY, FStride); Inc(OutputCntY); end; - Inc(InputCntX, FStride); Inc(OutputCntX); end; end; @@ -7904,6 +7938,34 @@ constructor TNNetConvolutionReLU.Create(pNumFeatures, pFeatureSize, FActivationFnDerivative := @RectifiedLinearUnitDerivative; end; +{ TNNetConvolutionGeLU } + +constructor TNNetConvolutionGeLU.Create(pNumFeatures, pFeatureSize, + pInputPadding, pStride: integer; pSuppressBias: integer = 0); +begin + inherited Create(pNumFeatures, pFeatureSize, pInputPadding, pStride, pSuppressBias); + FActivationFn := @GaussErrorLinUnit; + FActivationFnDerivative := 
@GaussErrorLinUnitDerivative; +end; + +{ TNNetConvolutionSwish6 } + +constructor TNNetConvolutionSwish6.Create(pNumFeatures, pFeatureSize, + pInputPadding, pStride, pSuppressBias: integer); +begin + inherited Create(pNumFeatures, pFeatureSize, pInputPadding, pStride, pSuppressBias); + FActivationFn := @Swish6Unit; + FActivationFnDerivative := @Swish6Derivative; +end; + +constructor TNNetConvolutionSwish.Create(pNumFeatures, pFeatureSize, + pInputPadding, pStride, pSuppressBias: integer); +begin + inherited Create(pNumFeatures, pFeatureSize, pInputPadding, pStride, pSuppressBias); + FActivationFn := @SwishUnit; + FActivationFnDerivative := @SwishDerivative; +end; + { TNNetPoolBase } procedure TNNetPoolBase.SetPrevLayer(pPrevLayer: TNNetLayer); var @@ -8424,7 +8486,7 @@ procedure TNNetConvolutionBase.PrepareInputForConvolutionFast(); DepthFSize, SizeOfDepthFSize: integer; yCount: integer; InputX: integer; - RowSize: integer; + //RowSize: integer; {$IFDEF AVXANY} SourceRawPos, DestRawPos: pointer; {$ENDIF} @@ -8436,7 +8498,7 @@ procedure TNNetConvolutionBase.PrepareInputForConvolutionFast(); else begin DepthFSize := FInputCopy.Depth * FFeatureSizeX; - RowSize := DepthFSize; + //RowSize := DepthFSize; SizeOfDepthFSize := DepthFSize * SizeOf(TNeuralFloat); MaxX := FOutput.SizeX - 1; MaxY := FOutput.SizeY - 1; @@ -8651,28 +8713,30 @@ procedure TNNetConvolution.BackpropagateFastCPU(); SmoothLocalOutputErrorDeriv: TNeuralFloat; LocalWeight, LocalPrevError: TNNetVolume; {SrcPtr,} LocalDestPtr: TNeuralFloatArrPtr; - SmoothLocalOutputErrorDerivPtr: pointer; - PrevNumElements, PrevMissedElements: integer; - PtrNeuronDelta, PtrPreparedInput: TNeuralFloatArrPtr; +// SmoothLocalOutputErrorDerivPtr: pointer; +// PrevNumElements, PrevMissedElements: integer; + {$IFDEF AVX64}PtrNeuronDelta, {$ENDIF} PtrPreparedInput: TNeuralFloatArrPtr; PrevPtrA, PrevPtrB: TNeuralFloatArrPtr; - NeuronWeights: integer; - LocalLearningErrorDerivPtr: pointer; - localNumElements, MissedElements: 
integer; +// NeuronWeights: integer; +// LocalLearningErrorDerivPtr: pointer; +// localNumElements : Integer; +// MissedElements: integer; MaxPrevX, MaxPrevY: integer; begin MaxX := OutputError.SizeX - 1; MaxY := OutputError.SizeY - 1; MaxD := OutputError.Depth - 1; + LocalDestPtr := nil; MaxPrevX := 1 + FPrevLayer.FOutputError.SizeX - FFeatureSizeX; MaxPrevY := 1 + FPrevLayer.FOutputError.SizeY - FFeatureSizeY; LocalPrevError := FPrevLayer.OutputError; - PrevNumElements := (FSizeXDepth div 4) * 4; - PrevMissedElements := FSizeXDepth - PrevNumElements; - NeuronWeights := FArrNeurons[0].Delta.Size; - localNumElements := (NeuronWeights div 4) * 4; - MissedElements := NeuronWeights - localNumElements; - SmoothLocalOutputErrorDerivPtr := Addr(SmoothLocalOutputErrorDeriv); - LocalLearningErrorDerivPtr := Addr(LocalLearningErrorDeriv); +// PrevNumElements := (FSizeXDepth div 4) * 4; +// PrevMissedElements := FSizeXDepth - PrevNumElements; +// NeuronWeights := FArrNeurons[0].Delta.Size; +// localNumElements := (NeuronWeights div 4) * 4; +// MissedElements := NeuronWeights - localNumElements; +// SmoothLocalOutputErrorDerivPtr := Addr(SmoothLocalOutputErrorDeriv); +// LocalLearningErrorDerivPtr := Addr(LocalLearningErrorDeriv); begin for OutputY := 0 to MaxY do begin @@ -8827,7 +8891,7 @@ procedure TNNetConvolution.BackpropagateFastTiledCPU(); {SrcPtr,} LocalDestPtr: TNeuralFloatArrPtr; SmoothLocalOutputErrorDerivPtr: pointer; PrevNumElements, PrevMissedElements: integer; - PtrNeuronDelta, PtrPreparedInput: TNeuralFloatArrPtr; + {$IFDEF AVX64}PtrNeuronDelta, {$ENDIF} PtrPreparedInput: TNeuralFloatArrPtr; PrevPtrA, PrevPtrB: TNeuralFloatArrPtr; NeuronWeights: integer; LocalLearningErrorDerivPtr: pointer; @@ -8840,6 +8904,7 @@ procedure TNNetConvolution.BackpropagateFastTiledCPU(); MaxX := OutputError.SizeX - 1; MaxY := OutputError.SizeY - 1; MaxD := OutputError.Depth - 1; + LocalDestPtr := nil; MaxPrevX := 1 + FPrevLayer.FOutputError.SizeX - FFeatureSizeX; MaxPrevY := 1 + 
FPrevLayer.FOutputError.SizeY - FFeatureSizeY; LocalPrevError := FPrevLayer.OutputError; @@ -9014,7 +9079,7 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); {SrcPtr,} LocalDestPtr: TNeuralFloatArrPtr; SmoothLocalOutputErrorDerivPtr: pointer; PrevNumElements, PrevMissedElements: integer; - PtrNeuronDelta, PtrPreparedInput: TNeuralFloatArrPtr; + {$IFDEF AVX64}PtrNeuronDelta : TNeuralFloatArrPtr; {$ENDIF} PrevPtrA, PrevPtrB: TNeuralFloatArrPtr; NeuronWeights: integer; LocalLearningErrorDerivPtr: pointer; @@ -9029,6 +9094,7 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); MaxX := OutputError.SizeX - 1; MaxY := OutputError.SizeY - 1; MaxD := OutputError.Depth - 1; + LocalDestPtr := nil; MaxPrevX := 1 + FPrevLayer.FOutputError.SizeX - FFeatureSizeX; MaxPrevY := 1 + FPrevLayer.FOutputError.SizeY - FFeatureSizeY; LocalPrevError := FPrevLayer.OutputError; @@ -9048,7 +9114,7 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); PrevX := (OutputX*FStride)-FPadding; OutputRawPos := FOutputErrorDeriv.GetRawPos(OutputX, OutputY); if (FCalculatePrevLayerError) then LocalDestPtr := LocalPrevError.GetRawPtr(OutputX, OutputY); - PtrPreparedInput := FInputPrepared.GetRawPtr(OutputX, OutputY); + //PtrPreparedInput := FInputPrepared.GetRawPtr(OutputX, OutputY); CanBackpropOnPos := (PrevX >= 0) and (PrevY >= 0) and (PrevX < MaxPrevX) and @@ -9213,8 +9279,8 @@ destructor TNNetConvolutionAbstract.Destroy(); procedure TNNetConvolutionAbstract.InitDefault(); {$IFDEF Debug} -var - MaxAbsW: TNeuralFloat; +//var +// MaxAbsW: TNeuralFloat; {$ENDIF} begin // Although Keras works better with Glorot, CAI seems to work better with He. 
@@ -9520,7 +9586,8 @@ procedure TNNetFullConnect.EnableOpenCL(DotProductKernel: TDotProductKernel); begin RefreshNeuronWeightList(); AfterWeightUpdate(); - FDotCL.PrepareForCompute(FConcatedWInter, FPrevLayer.FOutput, FVectorSize); + if FDotCL.PrepareForCompute(FConcatedWInter, FPrevLayer.FOutput, FVectorSize) <> CL_SUCCESS then + FreeAndNil(fDotCL); end; end; {$ENDIF} @@ -9826,185 +9893,97 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; end; end; - {$IFDEF FPC} - case S[0] of - 'TNNetInput' : Result := TNNetInput.Create(St[0], St[1], St[2], St[3]); - 'TNNetIdentity' : Result := TNNetIdentity.Create(); - 'TNNetDebug' : Result := TNNetDebug.Create(St[0], St[1]); - 'TNNetPad' : Result := TNNetPad.Create(St[0]); - 'TNNetIdentityWithoutBackprop': Result := TNNetIdentityWithoutBackprop.Create(); - 'TNNetReLU' : Result := TNNetReLU.Create(); - 'TNNetSwish' : Result := TNNetSwish.Create(); - 'TNNetSwish6' : Result := TNNetSwish6.Create(); - 'TNNetReLUSqrt': Result := TNNetReLUSqrt.Create(); - 'TNNetReLUL' : Result := TNNetReLUL.Create(St[0], St[1], St[2]); - 'TNNetReLU6' : Result := TNNetReLU6.Create(St[2]); - 'TNNetPower' : Result := TNNetPower.Create(St[0]); - 'TNNetSELU' : Result := TNNetSELU.Create(); - 'TNNetLeakyReLU' : Result := TNNetLeakyReLU.Create(); - 'TNNetVeryLeakyReLU' : Result := TNNetVeryLeakyReLU.Create(); - 'TNNetSigmoid' : Result := TNNetSigmoid.Create(); - 'TNNetHyperbolicTangent' : Result := TNNetHyperbolicTangent.Create(); - 'TNNetDropout' : Result := TNNetDropout.Create(1/St[0], St[1]); - 'TNNetReshape' : Result := TNNetReshape.Create(St[0], St[1], St[2]); - 'TNNetLayerFullConnect' : Result := TNNetFullConnect.Create(St[0], St[1], St[2], St[3]); - 'TNNetFullConnect' : Result := TNNetFullConnect.Create(St[0], St[1], St[2], St[3]); - 'TNNetFullConnectSigmoid': Result := TNNetFullConnectSigmoid.Create(St[0], St[1], St[2], St[3]); - 'TNNetFullConnectDiff' : Result := TNNetFullConnectDiff.Create(St[0], St[1], St[2], St[3]); - 
'TNNetLayerFullConnectReLU' : Result := TNNetFullConnectReLU.Create(St[0], St[1], St[2], St[3]); - 'TNNetFullConnectReLU' : Result := TNNetFullConnectReLU.Create(St[0], St[1], St[2], St[3]); - 'TNNetFullConnectLinear' : Result := TNNetFullConnectLinear.Create(St[0], St[1], St[2], St[3]); - 'TNNetLocalConnect' : Result := TNNetLocalConnect.Create(St[0], St[1], St[2], St[3], St[4]); - 'TNNetLocalProduct' : Result := TNNetLocalProduct.Create(St[0], St[1], St[2], St[3], St[4]); - 'TNNetLocalConnectReLU' : Result := TNNetLocalConnectReLU.Create(St[0], St[1], St[2], St[3], St[4]); - 'TNNetMulLearning' : Result := TNNetMulLearning.Create(St[0]); - 'TNNetMulByConstant' : Result := TNNetMulByConstant.Create(St[0]); - 'TNNetNegate' : Result := TNNetNegate.Create(); - 'TNNetLayerSoftMax' : Result := TNNetSoftMax.Create(); - 'TNNetSoftMax' : Result := TNNetSoftMax.Create(); - 'TNNetConvolution' : Result := TNNetConvolution.Create(St[0], St[1], St[2], St[3], St[4]); - 'TNNetConvolutionReLU' : Result := TNNetConvolutionReLU.Create(St[0], St[1], St[2], St[3], St[4]); - 'TNNetConvolutionLinear' : Result := TNNetConvolutionLinear.Create(St[0], St[1], St[2], St[3], St[4]); - 'TNNetGroupedConvolutionLinear' : Result := TNNetGroupedConvolutionLinear.Create(St[0], St[1], St[2], St[3], St[5], St[4]); - 'TNNetGroupedConvolutionReLU' : Result := TNNetGroupedConvolutionReLU.Create(St[0], St[1], St[2], St[3], St[5], St[4]); - 'TNNetGroupedPointwiseConvLinear' : Result := TNNetGroupedPointwiseConvLinear.Create({pNumFeatures=}St[0], {pGroups=}St[5], {pSuppressBias=}St[4]); - 'TNNetGroupedPointwiseConvReLU' : Result := TNNetGroupedPointwiseConvReLU.Create({pNumFeatures=}St[0], {pGroups=}St[5], {pSuppressBias=}St[4]); - 'TNNetConvolutionSharedWeights' : Result := TNNetConvolutionSharedWeights.Create(FLayers[St[5]]); - 'TNNetDepthwiseConv' : Result := TNNetDepthwiseConv.Create(St[0], St[1], St[2], St[3]); - 'TNNetDepthwiseConvReLU' : Result := TNNetDepthwiseConvReLU.Create(St[0], St[1], St[2], 
St[3]); - 'TNNetDepthwiseConvLinear' : Result := TNNetDepthwiseConvLinear.Create(St[0], St[1], St[2], St[3]); - 'TNNetPointwiseConv' : Result := TNNetPointwiseConv.Create(St[0], St[4]); - 'TNNetPointwiseConvReLU' : Result := TNNetPointwiseConvReLU.Create(St[0], St[4]); - 'TNNetPointwiseConvLinear' : Result := TNNetPointwiseConvLinear.Create(St[0], St[4]); - 'TNNetMaxPool' : Result := TNNetMaxPool.Create(St[0], St[1], St[2]); - 'TNNetMaxPoolPortable' : Result := TNNetMaxPoolPortable.Create(St[0], St[1], St[2]); - 'TNNetMinPool' : Result := TNNetMinPool.Create(St[0], St[1], St[2]); - 'TNNetAvgPool' : Result := TNNetAvgPool.Create(St[0]); - 'TNNetAvgChannel': Result := TNNetAvgChannel.Create(); - 'TNNetMaxChannel': Result := TNNetMaxChannel.Create(); - 'TNNetMinChannel': Result := TNNetMinChannel.Create(); - 'TNNetConcat' : Result := TNNetConcat.Create(aL); - 'TNNetDeepConcat' : Result := TNNetDeepConcat.Create(aL); - 'TNNetInterleaveChannels' : Result := TNNetInterleaveChannels.Create(St[0]); - 'TNNetSum' : Result := TNNetSum.Create(aL); - 'TNNetSplitChannels' : Result := TNNetSplitChannels.Create(aIdx); - 'TNNetSplitChannelEvery' : Result := TNNetSplitChannelEvery.Create(aIdx); - 'TNNetDeLocalConnect' : Result := TNNetDeLocalConnect.Create(St[0], St[1], St[4]); - 'TNNetDeLocalConnectReLU' : Result := TNNetDeLocalConnectReLU.Create(St[0], St[1], St[4]); - 'TNNetDeconvolution' : Result := TNNetDeconvolution.Create(St[0], St[1], St[4]); - 'TNNetDeconvolutionReLU' : Result := TNNetDeconvolutionReLU.Create(St[0], St[1], St[4]); - 'TNNetDeMaxPool' : Result := TNNetDeMaxPool.Create(St[0], St[7]); - 'TNNetDeAvgPool' : Result := TNNetDeAvgPool.Create(St[0]); - 'TNNetUpsample' : Result := TNNetUpsample.Create(); - 'TNNetLayerMaxNormalization': Result := TNNetLayerMaxNormalization.Create(); - 'TNNetLayerStdNormalization': Result := TNNetLayerStdNormalization.Create(); - 'TNNetMovingStdNormalization': Result := TNNetMovingStdNormalization.Create(); - 
'TNNetChannelStdNormalization': Result := TNNetChannelStdNormalization.Create(); - 'TNNetScaleLearning' : Result := TNNetScaleLearning.Create(); - 'TNNetChannelBias': Result := TNNetChannelBias.Create(); - 'TNNetChannelMul': Result := TNNetChannelMul.Create(); - 'TNNetChannelMulByLayer': Result := TNNetChannelMulByLayer.Create(St[0], St[1]); - 'TNNetCellBias': Result := TNNetCellBias.Create(); - 'TNNetCellMul': Result := TNNetCellMul.Create(); - 'TNNetCellMulByCell': Result := TNNetCellMulByCell.Create(St[0], St[1]); - 'TNNetRandomMulAdd': Result := TNNetRandomMulAdd.Create(St[0], St[1]); - 'TNNetChannelRandomMulAdd': Result := TNNetChannelRandomMulAdd.Create(St[0], St[1]); - 'TNNetChannelZeroCenter': Result := TNNetChannelZeroCenter.Create(); - 'TNNetLocalResponseNorm2D': Result := TNNetLocalResponseNorm2D.Create(St[0]); - 'TNNetLocalResponseNormDepth':Result := TNNetLocalResponseNormDepth.Create(St[0]); - 'TNNetAddAndDiv' :Result := TNNetAddAndDiv.Create(St[0], St[1]); - else - raise Exception.create(strData + ' not allowed in CreateLayer.'); - end; - {$ELSE} - if S[0] = 'TNNetInput' then Result := TNNetInput.Create(St[0], St[1], St[2], St[3]) else - if S[0] = 'TNNetIdentity' then Result := TNNetIdentity.Create() else - if S[0] = 'TNNetDebug' then Result := TNNetDebug.Create(St[0], St[1]) else - if S[0] = 'TNNetPad' then Result := TNNetPad.Create(St[0]) else - if S[0] = 'TNNetIdentityWithoutBackprop' then Result := TNNetIdentityWithoutBackprop.Create() else - if S[0] = 'TNNetReLU' then Result := TNNetReLU.Create() else - if S[0] = 'TNNetSwish' then Result := TNNetSwish.Create() else - if S[0] = 'TNNetSwish6' then Result := TNNetSwish6.Create() else - if S[0] = 'TNNetReLUSqrt' then Result := TNNetReLUSqrt.Create() else - if S[0] = 'TNNetReLUL' then Result := TNNetReLUL.Create(St[0], St[1], St[2]) else - if S[0] = 'TNNetReLU6' then Result := TNNetReLU6.Create(St[2]) else - if S[0] = 'TNNetPower' then Result := TNNetPower.Create(St[0]) else - if S[0] = 'TNNetSELU' 
then Result := TNNetSELU.Create() else - if S[0] = 'TNNetLeakyReLU' then Result := TNNetLeakyReLU.Create() else - if S[0] = 'TNNetVeryLeakyReLU' then Result := TNNetVeryLeakyReLU.Create() else - if S[0] = 'TNNetSigmoid' then Result := TNNetSigmoid.Create() else - if S[0] = 'TNNetHyperbolicTangent' then Result := TNNetHyperbolicTangent.Create() else - if S[0] = 'TNNetDropout' then Result := TNNetDropout.Create(1/St[0], St[1]) else - if S[0] = 'TNNetReshape' then Result := TNNetReshape.Create(St[0], St[1], St[2]) else - if S[0] = 'TNNetLayerFullConnect' then Result := TNNetFullConnect.Create(St[0], St[1], St[2], St[3]) else - if S[0] = 'TNNetFullConnect' then Result := TNNetFullConnect.Create(St[0], St[1], St[2], St[3]) else - if S[0] = 'TNNetFullConnectSigmoid' then Result := TNNetFullConnectSigmoid.Create(St[0], St[1], St[2], St[3]) else - if S[0] = 'TNNetFullConnectDiff' then Result := TNNetFullConnectDiff.Create(St[0], St[1], St[2], St[3]) else - if S[0] = 'TNNetLayerFullConnectReLU' then Result := TNNetFullConnectReLU.Create(St[0], St[1], St[2], St[3]) else - if S[0] = 'TNNetFullConnectReLU' then Result := TNNetFullConnectReLU.Create(St[0], St[1], St[2], St[3]) else - if S[0] = 'TNNetFullConnectLinear' then Result := TNNetFullConnectLinear.Create(St[0], St[1], St[2], St[3]) else - if S[0] = 'TNNetLocalConnect' then Result := TNNetLocalConnect.Create(St[0], St[1], St[2], St[3], St[4]) else - if S[0] = 'TNNetLocalProduct' then Result := TNNetLocalProduct.Create(St[0], St[1], St[2], St[3], St[4]) else - if S[0] = 'TNNetLocalConnectReLU' then Result := TNNetLocalConnectReLU.Create(St[0], St[1], St[2], St[3], St[4]) else - if S[0] = 'TNNetMulLearning' then Result := TNNetMulLearning.Create(St[0]) else - if S[0] = 'TNNetMulByConstant' then Result := TNNetMulByConstant.Create(St[0]) else - if S[0] = 'TNNetNegate' then Result := TNNetNegate.Create() else - if S[0] = 'TNNetLayerSoftMax' then Result := TNNetSoftMax.Create() else - if S[0] = 'TNNetSoftMax' then Result := 
TNNetSoftMax.Create() else - if S[0] = 'TNNetConvolution' then Result := TNNetConvolution.Create(St[0], St[1], St[2], St[3], St[4]) else - if S[0] = 'TNNetConvolutionReLU' then Result := TNNetConvolutionReLU.Create(St[0], St[1], St[2], St[3], St[4]) else - if S[0] = 'TNNetConvolutionLinear' then Result := TNNetConvolutionLinear.Create(St[0], St[1], St[2], St[3], St[4]) else - if S[0] = 'TNNetGroupedConvolutionLinear' then Result := TNNetGroupedConvolutionLinear.Create(St[0], St[1], St[2], St[3], St[5], St[4]) else - if S[0] = 'TNNetGroupedConvolutionReLU' then Result := TNNetGroupedConvolutionReLU.Create(St[0], St[1], St[2], St[3], St[5], St[4]) else - if S[0] = 'TNNetGroupedPointwiseConvLinear' then Result := TNNetGroupedPointwiseConvLinear.Create({pNumFeatures=}St[0], {pGroups=}St[5], {pSuppressBias=}St[4]) else - if S[0] = 'TNNetGroupedPointwiseConvReLU' then Result := TNNetGroupedPointwiseConvReLU.Create({pNumFeatures=}St[0], {pGroups=}St[5], {pSuppressBias=}St[4]) else - if S[0] = 'TNNetConvolutionSharedWeights' then Result := TNNetConvolutionSharedWeights.Create(FLayers[St[5]]) else - if S[0] = 'TNNetDepthwiseConv' then Result := TNNetDepthwiseConv.Create(St[0], St[1], St[2], St[3]) else - if S[0] = 'TNNetDepthwiseConvReLU' then Result := TNNetDepthwiseConvReLU.Create(St[0], St[1], St[2], St[3]) else - if S[0] = 'TNNetDepthwiseConvLinear' then Result := TNNetDepthwiseConvLinear.Create(St[0], St[1], St[2], St[3]) else - if S[0] = 'TNNetPointwiseConv' then Result := TNNetPointwiseConv.Create(St[0], St[4]) else - if S[0] = 'TNNetPointwiseConvReLU' then Result := TNNetPointwiseConvReLU.Create(St[0], St[4]) else - if S[0] = 'TNNetPointwiseConvLinear' then Result := TNNetPointwiseConvLinear.Create(St[0], St[4]) else - if S[0] = 'TNNetMaxPool' then Result := TNNetMaxPool.Create(St[0], St[1], St[2]) else - if S[0] = 'TNNetMaxPoolPortable' then Result := TNNetMaxPoolPortable.Create(St[0], St[1], St[2]) else - if S[0] = 'TNNetMinPool' then Result := 
TNNetMinPool.Create(St[0], St[1], St[2]) else - if S[0] = 'TNNetAvgPool' then Result := TNNetAvgPool.Create(St[0]) else - if S[0] = 'TNNetAvgChannel' then Result := TNNetAvgChannel.Create() else - if S[0] = 'TNNetMaxChannel' then Result := TNNetMaxChannel.Create() else - if S[0] = 'TNNetMinChannel' then Result := TNNetMinChannel.Create() else - if S[0] = 'TNNetConcat' then Result := TNNetConcat.Create(aL) else - if S[0] = 'TNNetInterleaveChannels' then Result := TNNetInterleaveChannels.Create(St[0]) else - if S[0] = 'TNNetDeepConcat' then Result := TNNetDeepConcat.Create(aL) else - if S[0] = 'TNNetSum' then Result := TNNetSum.Create(aL) else - if S[0] = 'TNNetSplitChannels' then Result := TNNetSplitChannels.Create(aIdx) else - if S[0] = 'TNNetSplitChannelEvery' then Result := TNNetSplitChannelEvery.Create(aIdx) else - if S[0] = 'TNNetDeLocalConnect' then Result := TNNetDeLocalConnect.Create(St[0], St[1], St[4]) else - if S[0] = 'TNNetDeLocalConnectReLU' then Result := TNNetDeLocalConnectReLU.Create(St[0], St[1], St[4]) else - if S[0] = 'TNNetDeconvolution' then Result := TNNetDeconvolution.Create(St[0], St[1], St[4]) else - if S[0] = 'TNNetDeconvolutionReLU' then Result := TNNetDeconvolutionReLU.Create(St[0], St[1], St[4]) else - if S[0] = 'TNNetDeMaxPool' then Result := TNNetDeMaxPool.Create(St[0], St[7]) else - if S[0] = 'TNNetDeAvgPool' then Result := TNNetDeAvgPool.Create(St[0]) else - if S[0] = 'TNNetUpsample' then Result := TNNetUpsample.Create() else - if S[0] = 'TNNetLayerMaxNormalization' then Result := TNNetLayerMaxNormalization.Create() else - if S[0] = 'TNNetLayerStdNormalization' then Result := TNNetLayerStdNormalization.Create() else - if S[0] = 'TNNetMovingStdNormalization' then Result := TNNetMovingStdNormalization.Create() else - if S[0] = 'TNNetChannelStdNormalization' then Result := TNNetChannelStdNormalization.Create() else - if S[0] = 'TNNetScaleLearning' then Result := TNNetChannelStdNormalization.Create() else - if S[0] = 'TNNetChannelBias' 
then Result := TNNetChannelBias.Create() else - if S[0] = 'TNNetChannelMul' then Result := TNNetChannelMul.Create() else - if S[0] = 'TNNetChannelMulByLayer' then Result := TNNetChannelMulByLayer.Create(St[0], St[1]) else - if S[0] = 'TNNetCellBias' then Result := TNNetCellBias.Create() else - if S[0] = 'TNNetCellMul' then Result := TNNetCellMul.Create() else - if S[0] = 'TNNetCellMulByCell' then Result := TNNetCellMulByCell.Create(St[0], St[1]) else - if S[0] = 'TNNetRandomMulAdd' then Result := TNNetRandomMulAdd.Create(St[0], St[1]) else - if S[0] = 'TNNetChannelRandomMulAdd' then Result := TNNetChannelRandomMulAdd.Create(St[0], St[1]) else - if S[0] = 'TNNetChannelZeroCenter' then Result := TNNetChannelZeroCenter.Create() else - if S[0] = 'TNNetLocalResponseNorm2D' then Result := TNNetLocalResponseNorm2D.Create(St[0]) else - if S[0] = 'TNNetLocalResponseNormDepth' then Result := TNNetLocalResponseNormDepth.Create(St[0]) else - if S[0] = 'TNNetAddAndDiv' then Result := TNNetAddAndDiv.Create(St[0], St[1]) else - raise Exception.create(strData + ' not allowed in CreateLayer.'); - {$ENDIF} + if S[0] = 'TNNetInput' then Result := TNNetInput.Create(St[0], St[1], St[2], St[3]) else + if S[0] = 'TNNetIdentity' then Result := TNNetIdentity.Create() else + if S[0] = 'TNNetDebug' then Result := TNNetDebug.Create(St[0], St[1]) else + if S[0] = 'TNNetPad' then Result := TNNetPad.Create(St[0]) else + if S[0] = 'TNNetIdentityWithoutBackprop' then Result := TNNetIdentityWithoutBackprop.Create() else + if S[0] = 'TNNetReLU' then Result := TNNetReLU.Create() else + if S[0] = 'TNNetSwish' then Result := TNNetSwish.Create() else + if S[0] = 'TNNetSwish6' then Result := TNNetSwish6.Create() else + if S[0] = 'TNNetReLUSqrt' then Result := TNNetReLUSqrt.Create() else + if S[0] = 'TNNetReLUL' then Result := TNNetReLUL.Create(St[0], St[1], St[2]) else + if S[0] = 'TNNetReLU6' then Result := TNNetReLU6.Create(St[2]) else + if S[0] = 'TNNetPower' then Result := TNNetPower.Create(St[0]) 
else + if S[0] = 'TNNetSELU' then Result := TNNetSELU.Create() else + if S[0] = 'TNNetLeakyReLU' then Result := TNNetLeakyReLU.Create() else + if S[0] = 'TNNetVeryLeakyReLU' then Result := TNNetVeryLeakyReLU.Create() else + if S[0] = 'TNNetSigmoid' then Result := TNNetSigmoid.Create() else + if S[0] = 'TNNetHyperbolicTangent' then Result := TNNetHyperbolicTangent.Create() else + if S[0] = 'TNNetDropout' then Result := TNNetDropout.Create(1/St[0], St[1]) else + if S[0] = 'TNNetReshape' then Result := TNNetReshape.Create(St[0], St[1], St[2]) else + if S[0] = 'TNNetLayerFullConnect' then Result := TNNetFullConnect.Create(St[0], St[1], St[2], St[3]) else + if S[0] = 'TNNetFullConnect' then Result := TNNetFullConnect.Create(St[0], St[1], St[2], St[3]) else + if S[0] = 'TNNetFullConnectSigmoid' then Result := TNNetFullConnectSigmoid.Create(St[0], St[1], St[2], St[3]) else + if S[0] = 'TNNetFullConnectDiff' then Result := TNNetFullConnectDiff.Create(St[0], St[1], St[2], St[3]) else + if S[0] = 'TNNetLayerFullConnectReLU' then Result := TNNetFullConnectReLU.Create(St[0], St[1], St[2], St[3]) else + if S[0] = 'TNNetFullConnectReLU' then Result := TNNetFullConnectReLU.Create(St[0], St[1], St[2], St[3]) else + if S[0] = 'TNNetFullConnectLinear' then Result := TNNetFullConnectLinear.Create(St[0], St[1], St[2], St[3]) else + if S[0] = 'TNNetLocalConnect' then Result := TNNetLocalConnect.Create(St[0], St[1], St[2], St[3], St[4]) else + if S[0] = 'TNNetLocalProduct' then Result := TNNetLocalProduct.Create(St[0], St[1], St[2], St[3], St[4]) else + if S[0] = 'TNNetLocalConnectReLU' then Result := TNNetLocalConnectReLU.Create(St[0], St[1], St[2], St[3], St[4]) else + if S[0] = 'TNNetMulLearning' then Result := TNNetMulLearning.Create(St[0]) else + if S[0] = 'TNNetMulByConstant' then Result := TNNetMulByConstant.Create(St[0]) else + if S[0] = 'TNNetNegate' then Result := TNNetNegate.Create() else + if S[0] = 'TNNetLayerSoftMax' then Result := TNNetSoftMax.Create() else + if S[0] = 
'TNNetSoftMax' then Result := TNNetSoftMax.Create() else + if S[0] = 'TNNetConvolution' then Result := TNNetConvolution.Create(St[0], St[1], St[2], St[3], St[4]) else + if S[0] = 'TNNetConvolutionReLU' then Result := TNNetConvolutionReLU.Create(St[0], St[1], St[2], St[3], St[4]) else + if S[0] = 'TNNetConvolutionGeLU' then Result := TNNetConvolutionGeLU.Create(St[0], St[1], St[2], St[3], St[4]) else + if S[0] = 'TNNetConvolutionSwish6' then Result := TNNetConvolutionSwish6.Create(St[0], St[1], St[2], St[3], St[4]) else + if S[0] = 'TNNetConvolutionSwish' then Result := TNNetConvolutionSwish.Create(St[0], St[1], St[2], St[3], St[4]) else + if S[0] = 'TNNetConvolutionLinear' then Result := TNNetConvolutionLinear.Create(St[0], St[1], St[2], St[3], St[4]) else + if S[0] = 'TNNetGroupedConvolutionLinear' then Result := TNNetGroupedConvolutionLinear.Create(St[0], St[1], St[2], St[3], St[5], St[4]) else + if S[0] = 'TNNetGroupedConvolutionReLU' then Result := TNNetGroupedConvolutionReLU.Create(St[0], St[1], St[2], St[3], St[5], St[4]) else + if S[0] = 'TNNetGroupedPointwiseConvLinear' then Result := TNNetGroupedPointwiseConvLinear.Create({pNumFeatures=}St[0], {pGroups=}St[5], {pSuppressBias=}St[4]) else + if S[0] = 'TNNetGroupedPointwiseConvReLU' then Result := TNNetGroupedPointwiseConvReLU.Create({pNumFeatures=}St[0], {pGroups=}St[5], {pSuppressBias=}St[4]) else + if S[0] = 'TNNetConvolutionSharedWeights' then Result := TNNetConvolutionSharedWeights.Create(FLayers[St[5]]) else + if S[0] = 'TNNetDepthwiseConv' then Result := TNNetDepthwiseConv.Create(St[0], St[1], St[2], St[3]) else + if S[0] = 'TNNetDepthwiseConvReLU' then Result := TNNetDepthwiseConvReLU.Create(St[0], St[1], St[2], St[3]) else + if S[0] = 'TNNetDepthwiseConvGeLU' then Result := TNNetDepthwiseConvGeLU.Create(St[0], St[1], St[2], St[3]) else + if S[0] = 'TNNetDepthwiseConvLinear' then Result := TNNetDepthwiseConvLinear.Create(St[0], St[1], St[2], St[3]) else + if S[0] = 'TNNetPointwiseConv' then Result := 
TNNetPointwiseConv.Create(St[0], St[4]) else + if S[0] = 'TNNetPointwiseConvReLU' then Result := TNNetPointwiseConvReLU.Create(St[0], St[4]) else + if S[0] = 'TNNetPointwiseConvLinear' then Result := TNNetPointwiseConvLinear.Create(St[0], St[4]) else + if S[0] = 'TNNetMaxPool' then Result := TNNetMaxPool.Create(St[0], St[1], St[2]) else + if S[0] = 'TNNetMaxPoolPortable' then Result := TNNetMaxPoolPortable.Create(St[0], St[1], St[2]) else + if S[0] = 'TNNetMinPool' then Result := TNNetMinPool.Create(St[0], St[1], St[2]) else + if S[0] = 'TNNetAvgPool' then Result := TNNetAvgPool.Create(St[0]) else + if S[0] = 'TNNetAvgChannel' then Result := TNNetAvgChannel.Create() else + if S[0] = 'TNNetMaxChannel' then Result := TNNetMaxChannel.Create() else + if S[0] = 'TNNetMinChannel' then Result := TNNetMinChannel.Create() else + if S[0] = 'TNNetConcat' then Result := TNNetConcat.Create(aL) else + if S[0] = 'TNNetInterleaveChannels' then Result := TNNetInterleaveChannels.Create(St[0]) else + if S[0] = 'TNNetDeepConcat' then Result := TNNetDeepConcat.Create(aL) else + if S[0] = 'TNNetSum' then Result := TNNetSum.Create(aL) else + if S[0] = 'TNNetSplitChannels' then Result := TNNetSplitChannels.Create(aIdx) else + if S[0] = 'TNNetSplitChannelEvery' then Result := TNNetSplitChannelEvery.Create(aIdx) else + if S[0] = 'TNNetDeLocalConnect' then Result := TNNetDeLocalConnect.Create(St[0], St[1], St[4]) else + if S[0] = 'TNNetDeLocalConnectReLU' then Result := TNNetDeLocalConnectReLU.Create(St[0], St[1], St[4]) else + if S[0] = 'TNNetDeconvolution' then Result := TNNetDeconvolution.Create(St[0], St[1], St[4]) else + if S[0] = 'TNNetDeconvolutionReLU' then Result := TNNetDeconvolutionReLU.Create(St[0], St[1], St[4]) else + if S[0] = 'TNNetDeMaxPool' then Result := TNNetDeMaxPool.Create(St[0], St[7]) else + if S[0] = 'TNNetDeAvgPool' then Result := TNNetDeAvgPool.Create(St[0]) else + if S[0] = 'TNNetUpsample' then Result := TNNetUpsample.Create() else + if S[0] = 
'TNNetLayerMaxNormalization' then Result := TNNetLayerMaxNormalization.Create() else + if S[0] = 'TNNetLayerStdNormalization' then Result := TNNetLayerStdNormalization.Create() else + if S[0] = 'TNNetMovingStdNormalization' then Result := TNNetMovingStdNormalization.Create() else + if S[0] = 'TNNetChannelStdNormalization' then Result := TNNetChannelStdNormalization.Create() else + if S[0] = 'TNNetScaleLearning' then Result := TNNetChannelStdNormalization.Create() else + if S[0] = 'TNNetChannelBias' then Result := TNNetChannelBias.Create() else + if S[0] = 'TNNetChannelMul' then Result := TNNetChannelMul.Create() else + if S[0] = 'TNNetChannelMulByLayer' then Result := TNNetChannelMulByLayer.Create(St[0], St[1]) else + if S[0] = 'TNNetCellBias' then Result := TNNetCellBias.Create() else + if S[0] = 'TNNetCellMul' then Result := TNNetCellMul.Create() else + if S[0] = 'TNNetCellMulByCell' then Result := TNNetCellMulByCell.Create(St[0], St[1]) else + if S[0] = 'TNNetRandomMulAdd' then Result := TNNetRandomMulAdd.Create(St[0], St[1]) else + if S[0] = 'TNNetChannelRandomMulAdd' then Result := TNNetChannelRandomMulAdd.Create(St[0], St[1]) else + if S[0] = 'TNNetChannelZeroCenter' then Result := TNNetChannelZeroCenter.Create() else + if S[0] = 'TNNetLocalResponseNorm2D' then Result := TNNetLocalResponseNorm2D.Create(St[0]) else + if S[0] = 'TNNetLocalResponseNormDepth' then Result := TNNetLocalResponseNormDepth.Create(St[0]) else + if S[0] = 'TNNetAddAndDiv' then Result := TNNetAddAndDiv.Create(St[0], St[1]) else + raise Exception.create(strData + ' not allowed in CreateLayer.'); end else begin @@ -10775,8 +10754,10 @@ procedure TNNet.Compute(pInput, pOutput: TNNetVolumeList; FromLayerIdx: integer var AuxOutput: TNNetVolume; MaxIdxInput, IdxInput: integer; + cancel : boolean; begin MaxIdxInput := pInput.Count - 1; + cancel := False; if MaxIdxInput >=0 then begin AuxOutput := TNNetVolume.Create(); @@ -10798,6 +10779,14 @@ procedure TNNet.Compute(pInput, pOutput: 
TNNetVolumeList; FromLayerIdx: integer begin MessageProc(IntToStr(IdxInput)+' processed.'); end; + + if Assigned(fProgress) then + begin + fProgress( self, idxInput, maxIdxInput + 1, cancel); + // check if the user wants to stop the loop + if cancel then + break; + end; end; AuxOutput.Free; end; @@ -11061,7 +11050,9 @@ procedure TNNet.CopyWeights(Origin: TNNet); function TNNet.ForceMaxAbsoluteDelta(vMax: TNeuralFloat): TNeuralFloat; var LayerCnt: integer; + {$IFDEF Debug} LayerMul: TNeuralFloat; + {$ENDIF} begin Result := 1; if FLayers.Count > 0 then @@ -11070,8 +11061,11 @@ function TNNet.ForceMaxAbsoluteDelta(vMax: TNeuralFloat): TNeuralFloat; begin if not(FLayers[LayerCnt].LinkedNeurons) then begin - LayerMul := FLayers[LayerCnt].ForceMaxAbsoluteDelta(vMax); + {$IFNDEF Debug} + FLayers[LayerCnt].ForceMaxAbsoluteDelta(vMax); + {$ENDIF} {$IFDEF Debug} + LayerMul := FLayers[LayerCnt].ForceMaxAbsoluteDelta(vMax); if LayerMul < Result then begin Result := LayerMul; @@ -11178,6 +11172,9 @@ procedure TNNet.EnableOpenCL(platform_id: cl_platform_id; LayerCnt: integer; begin FDotProductKernel := TDotProductCL.Create(platform_id, device_id); + FDotProductKernel.MessageProc := Self.MessageProc; + FDotProductKernel.ErrorProc := self.ErrorProc; + FDotProductKernel.Prepare; for LayerCnt := 0 to GetLastLayerIdx() do begin FLayers[LayerCnt].EnableOpenCL(FDotProductKernel); @@ -11678,13 +11675,29 @@ procedure TNNet.LoadFromFile(filename: string); end; function TNNet.Clone(): TNNet; -var - NNData: String; +//var NNData: String; +//begin +// NNData := SaveToString(); +// +// Result := TNNet.Create; +// Result.LoadFromString(NNData); +//end; + +var i : integer; + layStruct : string; begin - NNData := SaveToString(); + // it's not optimal but it covers the basis + layStruct := SaveStructureToString(); + + Result := TNNet.Create; + Result.LoadStructureFromString(layStruct); - Result := TNNet.Create; - Result.LoadFromString(NNData); + for i := 0 to FLayers.Count - 1 do + begin + // 
copy weights... basically reproduces LoadDataFromString but without all the overhead + Result.fLayers[i].Assign( FLayers[i] ); + Result.FLayers[i].AfterWeightUpdate; + end; end; procedure TNNet.LoadDataFromString(strData: string); @@ -11822,6 +11835,45 @@ procedure TNNetLayer.ApplyActivationFunctionToOutput(); end; end; +procedure TNNetLayer.Assign(layer: TNNetLayer); +var i : integer; +begin + // ########################################### + // #### Assign all elements common with the layer object + //FStruct := layer.FStruct; +// fOutput.Copy(layer.FOutput); +// FOutputRaw.Copy(layer.FOutputRaw); +// FOutputError.Copy(layer.FOutputError); +// FOutputErrorDeriv.Copy(FOutputErrorDeriv); +// FSuppressBias := layer.FSuppressBias; +// +// FNeurons := TNNetNeuronList.Create(); +// for i := 0 to layer.FNeurons.Count - 1 do +// fNeurons.Add(layer.FNeurons[i].Clone); +// +// AfterWeightUpdate; + + + assert( layer.FNeurons.Count = FNeurons.Count, 'neuron count does not check'); + for i := 0 to layer.FNeurons.Count - 1 do + TNNetNeuron(fNeurons[i]).Assign( layer.FNeurons[i] ); + + + //FLinkedNeurons := layer.FLinkedNeurons; +// FActivationFn := layer.FActivationFn; +// FActivationFnDerivative := layer.FActivationFnDerivative; +// FLearningRate := layer.FLearningRate; +// FL2Decay := layer.FL2Decay; +// //FPrevLayer := nil; +// FInertia := layer.FInertia; +// FBatchUpdate := layer.FBatchUpdate; +// FSmoothErrorPropagation := layer.FSmoothErrorPropagation; +// FDepartingBranchesCnt := layer.FDepartingBranchesCnt; +// FBackPropCallCurrentCnt := layer.FBackPropCallCurrentCnt; +// FBackwardTime := 0; +// FForwardTime := 0; +end; + procedure TNNetLayer.BuildArrNeurons(); var NeuronIdx: integer; @@ -13050,6 +13102,17 @@ procedure TNNetNeuron.ClearDelta; FBiasDelta := 0; end; +procedure TNNetNeuron.Assign(neuron: TNNetNeuron); +begin + FWeights.Copy( neuron.fWeights ); + FBackInertia.Copy(neuron.fBackInertia); + FDelta.Copy(neuron.FDelta); + + FBiasWeight := neuron.fBiasWeight; + 
FBiasInertia := neuron.FBiasInertia; + FBiasDelta := neuron.FBiasDelta; +end; + constructor TEasyBytePredictionViaNNet.Create(pActionByteLen, pStateByteLen: word; NumNeurons: integer; CacheSize: integer); @@ -13124,10 +13187,11 @@ procedure TBytePredictionViaNNet.Predict(var pActions, begin ABCopy(aActions, pActions); ABCopy(aCurrentState, pCurrentState); + idxCache := -1; if FUseCache then idxCache := FCache.Read(pActions, pPredictedState); Equal := ABCmp(pActions, pCurrentState); - if FUseCache and (idxCache <> -1) and Equal then + if (idxCache <> -1) and Equal then begin FCached := True; end diff --git a/neural/neuralopencl.pas b/neural/neuralopencl.pas index c2617e46..71c88250 100644 --- a/neural/neuralopencl.pas +++ b/neural/neuralopencl.pas @@ -35,7 +35,7 @@ interface uses - Classes, SysUtils, cl, {$IFDEF FPC}ctypes{$ELSE}Winapi.Windows,AnsiStrings,CL_Platform{$ENDIF}, neuralvolume; + Classes, SysUtils, cl, {$IFDEF FPC}ctypes{$ELSE}Windows,AnsiStrings,CL_Platform{$ENDIF}, neuralvolume; type {$IFDEF FPC} @@ -84,14 +84,14 @@ TEasyOpenCL = class(TMObject) FCompilerOptions: ShortString; {$ENDIF} - procedure LoadPlatforms(); procedure FreeContext(); procedure CompileProgram(); overload; public - constructor Create(); override; + constructor Create(); //override; destructor Destroy(); override; + procedure LoadPlatforms(); procedure printDevicesInfo(); function GetPlatformCount(): integer; function GetDeviceCount(): integer; @@ -160,11 +160,13 @@ TEasyOpenCLV = class (TEasyOpenCL) TNeuralKernel = class(TEasyOpenCLV) private + fkernelname : string; /// OpenCL Kernel FKernel: cl_kernel; function PrepareKernel(kernelname: string = 'cai_dot_product'): integer; procedure UnprepareKernel(); public + procedure Prepare; constructor Create(pCurrentPlatform: cl_platform_id; pCurrentDevice: cl_device_id; kernelname: string = 'cai_dot_product'); destructor Destroy(); override; @@ -396,7 +398,14 @@ function TDotProductSharedKernel.PrepareForCompute(VAs, VBs: TNNetVolume; 
FResultBuffer := FDotProductKernel.CreateOutputBuffer(FNumAs * FNumBs * SizeOf(TNeuralFloat)); FPreviousComputeTime := 0; - PrepareForCompute := CL_SUCCESS; + if Assigned(FResultBuffer) and Assigned(FInputBufferAs) and Assigned(FInputBufferBs) + then + PrepareForCompute := CL_SUCCESS + else + begin + UnprepareForCompute; + Result := CL_INVALID_MEM_OBJECT; + end; end; procedure TDotProductSharedKernel.Compute @@ -550,6 +559,39 @@ procedure TDotProductSharedKernel.FinishAndLoadResult(Results: TNNetVolume; end; end; +procedure TNeuralKernel.Prepare; +var resStream : TResourceStream; +begin + // ########################################### + // #### Check if the neural.cl file is part of the resources + try + resStream := TResourceStream.Create(hInstance, 'NeuralCL', RT_RCDATA); + FOpenCLProgramSource.LoadFromStream(resStream, TEncoding.UTF8); + + resStream.Free; + CompileProgram(); + PrepareKernel(fkernelname); + exit; + except + MessageProc('Resource NeuralCL not found - try to open file...'); + end; + + // Create the OpenCL Kernel Here: + if FileExists('../../../neural/neural.cl') then + begin + CompileProgramFromFile('../../../neural/neural.cl'); + end + else if FileExists('neural.cl') then + begin + CompileProgramFromFile('neural.cl'); + end + else + begin + MessageProc('File neural.cl could not be found.'); + end; + PrepareKernel(fkernelname); +end; + function TNeuralKernel.PrepareKernel(kernelname: string): integer; begin UnprepareKernel(); @@ -569,21 +611,7 @@ constructor TNeuralKernel.Create(pCurrentPlatform: cl_platform_id; inherited Create(); SetCurrentPlatform(pCurrentPlatform); SetCurrentDevice(pCurrentDevice); - - // Create the OpenCL Kernel Here: - if FileExists('../../../neural/neural.cl') then - begin - CompileProgramFromFile('../../../neural/neural.cl'); - end - else if FileExists('neural.cl') then - begin - CompileProgramFromFile('neural.cl'); - end - else - begin - MessageProc('File neural.cl could not be found.'); - end; - PrepareKernel(kernelname); 
+ fkernelname := kernelname; end; destructor TNeuralKernel.Destroy(); @@ -834,6 +862,12 @@ function TEasyOpenCLV.WriteBuffer(buffer: cl_mem; V: TNNetVolume; blocking: cl_b function TEasyOpenCLV.ReadBuffer(buffer: cl_mem; V: TNNetVolume; blocking: cl_bool): integer; begin Result := ReadBuffer(buffer, V.GetMemSize(), V.DataPtr, blocking); + + if Result <> CL_SUCCESS then + begin + FErrorProc(Format( 'Error: %p, %p, %d', [buffer, V.DataPtr, V.GetMemSize ])); + end; + end; function TEasyOpenCLV.CreateAndWriteBuffer(V: TNNetVolume; var buffer: cl_mem @@ -899,11 +933,15 @@ procedure TEasyOpenCL.LoadPlatforms(); begin {$IFDEF FPC} err := clGetPlatformInfo(local_platformids[i], CL_PLATFORM_NAME, sizeof(buf), @buf, bufwritten); - FPlatformNames[i] := buf; + if err <> CL_SUCCESS then + FErrorProc('ERROR: ' + GetString(err) ); + FPlatformNames[i] := string(buf); FPlatformIds[i] := local_platformids[i]; {$ELSE} err := clGetPlatformInfo(local_platformids^, CL_PLATFORM_NAME, sizeof(buf), @buf, @bufwritten); - FPlatformNames[i] := buf; + if err <> CL_SUCCESS then + FErrorProc('ERROR: ' + String(GetString(err) ) ); + FPlatformNames[i] := string(buf); FPlatformIds[i] := local_platformids^; Inc(local_platformids); {$ENDIF} @@ -933,7 +971,11 @@ procedure TEasyOpenCL.CompileProgram(); {$IFDEF FPC} localKernelSource := FOpenCLProgramSource.GetText(); {$ELSE} + {$if CompilerVersion >= 23} localKernelSource := AnsiStrings.StrNew(PAnsiChar(AnsiString(FOpenCLProgramSource.Text))); + {$ELSE} + localKernelSource := PAnsiChar(AnsiString(FOpenCLProgramSource.Text)); + {$IFEND} {$ENDIF} // Create a compute context @@ -965,7 +1007,7 @@ procedure TEasyOpenCL.CompileProgram(); {$ENDIF} if FProg = nil then begin - FMessageProc(localKernelSource); + FMessageProc(String(localKernelSource)); FErrorProc('Error: Failed to create compute program:' + IntToStr(err)); exit; end @@ -973,19 +1015,19 @@ procedure TEasyOpenCL.CompileProgram(); FMessageProc('clCreateProgramWithSource OK!'); localCompilerOptions 
:= {$IFDEF FPC}StrAlloc{$ELSE}AnsiStrAlloc{$ENDIF}(length(FCompilerOptions)+1); - {$IFDEF FPC}StrPCopy{$ELSE}AnsiStrings.StrPCopy{$ENDIF}(localCompilerOptions,FCompilerOptions); + {$IFDEF FPC}StrPCopy{$ELSE} {$IF CompilerVersion >= 23}AnsiStrings.StrPCopy{$ELSE}StrPCopy{$IFEND} {$ENDIF}(localCompilerOptions,FCompilerOptions); // Build the program executable err := clBuildProgram(FProg, 0, nil, localCompilerOptions, nil, nil); - {$IFDEF FPC}StrDispose{$ELSE}AnsiStrings.StrDispose{$ENDIF}(localCompilerOptions); + {$IFDEF FPC}StrDispose{$ELSE}{$IF CompilerVersion >= 23}AnsiStrings.StrDispose{$ELSE}StrDispose{$IFEND}{$ENDIF}(localCompilerOptions); if (err <> CL_SUCCESS) then begin errorlog := @errorlogstr[1]; loglen := SizeOf(errorlogstr); clGetProgramBuildInfo(FProg, FCurrentDevice, CL_PROGRAM_BUILD_LOG, SizeOf(errorlogstr), errorlog, {$IFDEF FPC}loglen{$ELSE}@loglen{$ENDIF}); - FErrorProc('Error: Failed to build program executable:' + IntToStr(err) + ' ' + errorlog); + FErrorProc('Error: Failed to build program executable:' + IntToStr(err) + ' ' + String(errorlog)); exit; end else @@ -1009,7 +1051,7 @@ procedure TEasyOpenCL.printDevicesInfo(); for k := low(platform_str_info) to high(platform_str_info) do begin clGetPlatformInfo(FPlatformIds[i], platform_str_info[k].id, sizeof(buf), @buf, {$IFDEF FPC}bufwritten{$ELSE}@bufwritten{$ENDIF}); - MessageProc(platform_str_info[k].Name + ': ' + buf); + MessageProc(platform_str_info[k].Name + ': ' + String(buf)); end; GetDevicesFromPlatform(FPlatformIds[i], local_devices, local_deviceids); @@ -1022,7 +1064,7 @@ procedure TEasyOpenCL.printDevicesInfo(); for k := low(device_str_info) to high(device_str_info) do begin clGetDeviceInfo(local_deviceids[j], device_str_info[k].id, sizeof(buf), @buf, {$IFDEF FPC}bufwritten{$ELSE}@bufwritten{$ENDIF}); - MessageProc(device_str_info[k].Name + ': ' + buf); + MessageProc(device_str_info[k].Name + ': ' + String(buf)); end; for k := low(device_word_info) to high(device_word_info) do @@ 
-1070,22 +1112,32 @@ procedure TEasyOpenCL.GetDevicesFromPlatform(PlatformId: cl_platform_id; out pDe firstpointer := local_deviceids; err := clGetDeviceIDs(PlatformId, CL_DEVICE_TYPE_ALL, local_devices, local_deviceids, nil); - if (local_devices > 0) then + if err = CL_SUCCESS then begin - for j := 0 to local_devices - 1 do + if (local_devices > 0) then begin - {$IFDEF FPC} - err := clGetDeviceInfo(local_deviceids[j], CL_DEVICE_NAME, sizeof(buf), @buf, bufwritten); - pDeviceNames[j] := buf; - pDevices[j] := local_deviceids[j]; - {$ELSE} - err := clGetDeviceInfo(local_deviceids^, CL_DEVICE_NAME, sizeof(buf), @buf, @bufwritten); - pDeviceNames[j] := buf; - pDevices[j] := local_deviceids^; - Inc(local_deviceids); - {$ENDIF} + for j := 0 to local_devices - 1 do + begin + {$IFDEF FPC} + err := clGetDeviceInfo(local_deviceids[j], CL_DEVICE_NAME, sizeof(buf), @buf, bufwritten); + if err <> CL_SUCCESS then + FErrorProc('ERROR: ' + GetString(err)); + pDeviceNames[j] := buf; + pDevices[j] := local_deviceids[j]; + {$ELSE} + err := clGetDeviceInfo(local_deviceids^, CL_DEVICE_NAME, sizeof(buf), @buf, @bufwritten); + if err <> CL_SUCCESS then + FErrorProc('ERROR: ' + String(GetString(err))); + pDeviceNames[j] := String(buf); + pDevices[j] := local_deviceids^; + Inc(local_deviceids); + {$ENDIF} + end; end; - end; + end + else + FErrorProc('ERROR: ' + String(GetString( err ) )); + freemem(firstpointer); end; end; @@ -1224,7 +1276,7 @@ function TEasyOpenCL.CreateKernel(kernelname: string): cl_kernel; begin err := 0; localKernelName := {$IFDEF FPC}StrAlloc{$ELSE}AnsiStrAlloc{$ENDIF}(length(kernelname)+1); - {$IFDEF FPC}StrPCopy{$ELSE}AnsiStrings.StrPCopy{$ENDIF}(localKernelName,kernelname); + {$IFDEF FPC}StrPCopy{$ELSE}{$IF CompilerVersion >= 23}AnsiStrings.StrPCopy{$ELSE}StrPCopy{$IFEND}{$ENDIF}(localKernelName,AnsiString(kernelname)); // Create the compute kernel in the program we wish to run Result := clCreateKernel(prog, localKernelName, {$IFDEF FPC}err{$ELSE}@err{$ENDIF}); @@ 
-1236,7 +1288,7 @@ function TEasyOpenCL.CreateKernel(kernelname: string): cl_kernel; begin FMessageProc('clCreateKernel '+kernelname+' OK!'); end; - {$IFDEF FPC}StrDispose{$ELSE}AnsiStrings.StrDispose{$ENDIF}(localKernelName); + {$IFDEF FPC}StrDispose{$ELSE}{$IF CompilerVersion >= 23}AnsiStrings.StrDispose{$ELSE}StrDispose{$IFEND} {$ENDIF}(localKernelName); end; function TEasyOpenCL.RunKernel(pkernel: cl_kernel; ThreadCount: integer): integer; @@ -1369,7 +1421,6 @@ constructor TEasyOpenCL.Create(); MessageProc := Self.DefaultMessageProc; ErrorProc := Self.DefaultErrorProc; {$ENDIF} - LoadPlatforms(); SetLength(FDeviceNames, 0); SetLength(FDevices, 0); diff --git a/neural/neuralthread.pas b/neural/neuralthread.pas index 550ed06c..59761766 100644 --- a/neural/neuralthread.pas +++ b/neural/neuralthread.pas @@ -169,7 +169,7 @@ procedure CreateNeuralThreadListIfRequired(); begin if Not(Assigned(vNTL)) then begin - NeuralThreadListCreate(TThread.ProcessorCount); + NeuralThreadListCreate(System.CPUCount); end; end; @@ -178,7 +178,7 @@ function NeuralDefaultThreadCount: integer; {$IFDEF FPC} Result := GetSystemThreadCount; {$ELSE} - Result := TThread.ProcessorCount; + Result := System.CPUCount; {$ENDIF} end; diff --git a/neural/neuralvolume.pas b/neural/neuralvolume.pas index 32e9ad62..c52ba1d9 100644 --- a/neural/neuralvolume.pas +++ b/neural/neuralvolume.pas @@ -201,7 +201,7 @@ TVolume = class(TObject) procedure CopyChannels(Original: TVolume; aChannels: array of integer); procedure Define(Original: array of T); function DotProduct(Original: TVolume): T; overload; {$IFDEF Release} inline; {$ENDIF} - class function DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: integer): Single; overload; {$IFDEF Release} inline; {$ENDIF} + class function DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: integer): Single; overload; class function Product(PtrA: TNeuralFloatArrPtr; NumElements: integer): Single; overload; {$IFDEF Release} inline; {$ENDIF} function 
SumDiff(Original: TVolume): T; {$IFDEF Release} inline; {$ENDIF} procedure DebugDiff(Original: TVolume; Limit: Single = 0); @@ -321,9 +321,9 @@ TNNetVolume = class (TVolume) procedure DotProductsTiled(NumAs, NumBs, VectorSize: integer; VAs, VBs: TNNetVolume; TileSizeA, TileSizeB: integer); procedure GroupedDotProductsTiled(Groups, NumAs, NumBs, VectorSize: integer; VAs, VBs: TNNetVolume; TileSizeA, TileSizeB: integer); procedure AddArea(DestX, DestY, OriginX, OriginY, LenX, LenY: integer; Original: TNNetVolume); - function HasAVX: boolean; {$IFDEF Release} inline; {$ENDIF} - function HasAVX2: boolean; {$IFDEF Release} inline; {$ENDIF} - function HasAVX512: boolean; {$IFDEF Release} inline; {$ENDIF} + function HasAVX: boolean; + function HasAVX2: boolean; + function HasAVX512: boolean; function PearsonCorrelation(Y : TNNetVolume): TNeuralFloat; procedure AddSumChannel(Original: TNNetVolume); {$IFDEF Release} inline; {$ENDIF} procedure AddSumSqrChannel(Original: TNNetVolume); {$IFDEF Release} inline; {$ENDIF} @@ -375,8 +375,7 @@ TNNetVolumePair = class(TObject) FB: TNNetVolume; public constructor Create(); overload; - constructor Create(pA, pB: TNNetVolume); overload; - constructor CreateCopying(pA, pB: TNNetVolume); overload; + constructor Create(pA, pB: TNNetVolume; createCopy : boolean = False); overload; destructor Destroy(); override; @@ -391,9 +390,8 @@ TMObject = class(TObject) protected FMessageProc: TGetStrProc; FErrorProc: TGetStrProc; - public - constructor Create(); virtual; + constructor Create(); //virtual; destructor Destroy(); override; procedure DefaultMessageProc(const S: string); @@ -401,7 +399,6 @@ TMObject = class(TObject) procedure DefaultHideMessages(const S: string); procedure HideMessages(); - published property MessageProc: TGetStrProc read FMessageProc write FMessageProc; property ErrorProc: TGetStrProc read FErrorProc write FErrorProc; end; @@ -558,6 +555,14 @@ TNNetDictionary = class(TStringListInt) function ReLULeakyBound(x: 
TNeuralFloat): TNeuralFloat; function ReLULeakyBoundDerivative(x: TNeuralFloat): TNeuralFloat; + function GaussErrorLinUnit(x : TNeuralFloat) : TNeuralFloat; + function GaussErrorLinUnitDerivative(x : TNeuralFloat) : TNeuralFloat; + + function Swish6Unit(x : TNeuralFloat) : TNeuralFloat; + function Swish6Derivative(x : TNeuralFloat) : TNeuralFloat; + function SwishUnit(x : TNeuralFloat) : TNeuralFloat; + function SwishDerivative(x : TNeuralFloat) : TNeuralFloat; + function Sigmoid(x: TNeuralFloat): TNeuralFloat; function SigmoidDerivative(x: TNeuralFloat): TNeuralFloat; @@ -599,8 +604,15 @@ TNNetDictionary = class(TStringListInt) implementation -uses - Math, neuralbit; +{$IFDEF CPUX64} +{$DEFINE x64} +{$ENDIF} +{$IFDEF cpux86_64} +{$DEFINE x64} +{$ENDIF} + +uses {$IFNDEF x64} Neural.AVX {$ELSE} Neural.AVXx64{$ENDIF}, neuralbit, + Math, CPUFeatures; function CreateTokenizedStringList(str: string; c:char):TStringList; begin @@ -1500,6 +1512,109 @@ function RectifiedLinearUnitDerivative(x: TNeuralFloat): TNeuralFloat; else Result := 0; end; +// paper: GAUSSIAN ERROR LINEAR UNITS (GELUS) Gimpel et al. 
2018 +function GaussErrorLinUnit(x : TNeuralFloat) : TNeuralFloat; +const cSqrt_2_pi = 0.797884560803; +begin + // I define some calculational boundaries here -> + if x > 6 + then + Result := x + else if x < -4 + then + Result := 0 + else + Result := 0.5*x*(1 + tanh( cSqrt_2_pi*( x + 0.044715*x*x*x))); +end; + +function GaussErrorLinUnitDerivative(x : TNeuralFloat) : TNeuralFloat; +begin + // from https://mlfromscratch.com/activation-functions-explained/#/ + // plus the derrivatives higher than 5 + if x > 6.5 + then + Result := 1 + else if x < -5 + then + Result := 0 + else + Result := 0.5 + 0.5*tanh(0.0356774*x*x*x + 0.797885 * x) + (0.0535161*x*x*x + 0.398942*x)*sqr( sech(0.0356774*x*x*x + 0.797885*x) ) +end; + +function Swish6Unit(x : TNeuralFloat) : TNeuralFloat; +begin + if x < -6 then + begin + Result := 0; + end + else if x < 6 then + begin + Result := x* (1 / ( 1 + Exp(-x) )); + end + else + begin + // max out at 6 + Result := 6; + end; +end; + +function Swish6Derivative(x : TNeuralFloat) : TNeuralFloat; +var sigmoidValue : TNeuralFloat; +begin + if x < -6 then + begin + Result := 0; + end + else if x < 6 then + begin + sigmoidValue := (1 / ( 1 + Exp(-x) )); + Result := x*sigmoidValue; + Result := Result + sigmoidValue*(1 - Result); + end + else + begin + // max out at 6 + Result := 0; + end; +end; + +function SwishUnit(x : TNeuralFloat) : TNeuralFloat; +begin + if x < -6 then + begin + Result := 0; + end + else if x < 6 then + begin + Result := x* (1 / ( 1 + Exp(-x) )); + end + else + begin + // + Result := x; + end; +end; + +function SwishDerivative(x : TNeuralFloat) : TNeuralFloat; +var sigmoidValue : TNeuralFloat; +begin + if x < -6 then + begin + Result := 0; + end + else if x < 6 then + begin + sigmoidValue := (1 / ( 1 + Exp(-x) )); + Result := x*sigmoidValue; + Result := Result + sigmoidValue*(1 - Result); + end + else + begin + // + Result := 1; + end; +end; + constructor TNNetVolumePair.Create(); begin inherited Create(); @@ -1507,20 +1622,22 @@ 
constructor TNNetVolumePair.Create(); FB := TNNetVolume.Create(); end; -constructor TNNetVolumePair.Create(pA, pB: TNNetVolume); +constructor TNNetVolumePair.Create(pA, pB: TNNetVolume; createCopy : boolean = False); begin - inherited Create(); - FA := pA; - FB := pB; -end; + inherited Create; -constructor TNNetVolumePair.CreateCopying(pA, pB: TNNetVolume); -begin - inherited Create(); - FA := TNNetVolume.Create(pA); - FB := TNNetVolume.Create(pB); - FA.Copy(pA); - FB.Copy(pB); + if createCopy then + begin + FA := TNNetVolume.Create(pA); + FB := TNNetVolume.Create(pB); + FA.Copy(pA); + FB.Copy(pB); + end + else + begin + FA := pA; + FB := pB; + end; end; destructor TNNetVolumePair.Destroy(); @@ -1988,7 +2105,6 @@ function TNNetVolumeList.GetAvg(): TNeuralFloat; procedure TNNetVolumeList.AddValue(Value: TNeuralFloat); var I: integer; - AuxVolume: TNNetVolume; begin if (Count>0) then begin @@ -2002,7 +2118,6 @@ procedure TNNetVolumeList.AddValue(Value: TNeuralFloat); procedure TNNetVolumeList.Divi(Value: TNeuralFloat); var I: integer; - AuxVolume: TNNetVolume; begin if (Count>0) then begin @@ -2476,6 +2591,7 @@ function TVolume.RandomGaussianValue(): TNeuralFloat; r, x, y: TNeuralFloat; begin r := 0; + x := 0; // loop executed 4 / pi = 1.273.. 
times on average while ( (r > 1) or (r = 0) ) do begin @@ -3017,7 +3133,9 @@ class procedure TVolume.MulAddPPVS(PtrA, PtrB: TNeuralFloatArrPtr; Value: T; I: integer; vHigh: integer; BasePos: integer; + {$IFDEF FPC} AddrA, AddrB: TNeuralFloatPtr; + {$ENDIF} begin BasePos := 0; vHigh := pSize - 1; @@ -3070,7 +3188,9 @@ class procedure TVolume.MulMulAdd(PtrA, PtrB: TNeuralFloatArrPtr; Value1, I: integer; vHigh: integer; BasePos: integer; + {$IFDEF FPC} AddrA, AddrB: TNeuralFloatPtr; + {$ENDIF} begin BasePos := 0; vHigh := pSize - 1; @@ -3120,14 +3240,16 @@ class procedure TVolume.MulAdd(PtrA, PtrB, PtrC: TNeuralFloatArrPtr; I: integer; vHigh: integer; BasePos: integer; + {$IFDEF FPC} AddrA, AddrB, AddrC: TNeuralFloatPtr; + {$ENDIF} begin BasePos := 0; + vHigh := pSize - 1; + {$IFDEF FPC} AddrA := pointer(PtrA); AddrB := pointer(PtrB); AddrC := pointer(PtrC); - vHigh := pSize - 1; - {$IFDEF FPC} while BasePos <= vHigh - 7 do begin (AddrA)^ := (AddrA)^ + (AddrB)^ * (AddrC)^; @@ -4946,10 +5068,13 @@ procedure TVolume.LoadFromString(strData: string); I: integer; AuxFloat: Single; begin - version := 1; S := CreateTokenizedStringList(strData,';'); version := StrToInt(S[0]); + + if version <> 1 then + raise Exception.Create('Error V' + IntToStr(version) + ' found but V1.0 expected'); + pSizeX := StrToInt(S[1]); pSizeY := StrToInt(S[2]); pDepth := StrToInt(S[3]); @@ -5162,11 +5287,12 @@ procedure TNNetVolume.InterleavedDotProduct(InterleavedAs, Bs: TNNetVolume; procedure TNNetVolume.DotProducts(NumAs, NumBs, VectorSize: integer; VAs, VBs: TNNetVolume); var - CntA, CntB, CntAPos, CntBPos, MaxA, MaxB: integer; - DestPointer: pointer; - CntBVectorSizePlusCntBPos: integer; + CntA, CntB, MaxA, MaxB: integer; + {$IFDEF AVXANY} vRes: array[0..3] of Single; localNumElements, MissedElements: integer; + {$ENDIF} + PtrA, PtrB: TNeuralFloatArrPtr; Result: TNeuralFloat; begin @@ -5175,8 +5301,10 @@ procedure TNNetVolume.DotProducts(NumAs, NumBs, VectorSize: integer; VAs, VBs: T 
//localNumElements := (VectorSize div 4) * 4; //MissedElements := VectorSize - localNumElements; + {$IFDEF AVXANY} MissedElements := VectorSize and 3; localNumElements := VectorSize xor MissedElements; + {$ENDIF} for CntB := 0 to MaxB do begin @@ -5418,11 +5546,12 @@ procedure TNNetVolume.DotProducts(NumAs, NumBs, VectorSize: integer; VAs, VBs: T procedure TNNetVolume.DotProductsTiled(NumAs, NumBs, VectorSize: integer; VAs, VBs: TNNetVolume; TileSizeA, TileSizeB: integer); var - CntA, CntB, CntAPos, CntBPos, MaxA, MaxB: integer; - DestPointer: pointer; - CntBVectorSizePlusCntBPos: integer; + CntA, CntB: integer; + {$IFDEF AVXANY} vRes: array[0..3] of Single; - localNumElements, MissedElements: integer; + localNumElements : integer; + MissedElements: integer; + {$ENDIF} PtrA, PtrB: TNeuralFloatArrPtr; Result: TNeuralFloat; // Tiling @@ -5430,13 +5559,12 @@ procedure TNNetVolume.DotProductsTiled(NumAs, NumBs, VectorSize: integer; VAs, V StartTileA, EndTileA, StartTileB, EndTileB: integer; MaxTileA, MaxTileB: integer; begin - MaxA := NumAs - 1; - MaxB := NumBs - 1; - //localNumElements := (VectorSize div 4) * 4; //MissedElements := VectorSize - localNumElements; + {$IFDEF AVXANY} MissedElements := VectorSize and 3; localNumElements := VectorSize xor MissedElements; + {$ENDIF} MaxTileA := (NumAs div TileSizeA) - 1; MaxTileB := (NumBs div TileSizeB) - 1; for TileBCnt := 0 to MaxTileB do @@ -5694,13 +5822,14 @@ procedure TNNetVolume.DotProductsTiled(NumAs, NumBs, VectorSize: integer; VAs, V procedure TNNetVolume.GroupedDotProductsTiled(Groups, NumAs, NumBs, VectorSize: integer; VAs, VBs: TNNetVolume; TileSizeA, TileSizeB: integer); var - CntA, CntB, CntAPos, CntBPos, MaxA, MaxB: integer; + CntA, CntB: integer; GroupId, GroupASize: integer; VectoreBSize: integer; + {$IFDEF AVXANY} DestPointer: pointer; - CntBVectorSizePlusCntBPos: integer; vRes: array[0..3] of Single; localNumElements, MissedElements: integer; + {$ENDIF} PtrA, PtrB: TNeuralFloatArrPtr; Result: 
TNeuralFloat; // Tiling @@ -5708,8 +5837,6 @@ procedure TNNetVolume.GroupedDotProductsTiled(Groups, NumAs, NumBs, StartTileA, EndTileA, StartTileB, EndTileB: integer; MaxTileA, MaxTileB: integer; begin - MaxA := NumAs - 1; - MaxB := NumBs - 1; GroupASize := NumAs div Groups; VectoreBSize := VectorSize * Groups; @@ -5727,8 +5854,10 @@ procedure TNNetVolume.GroupedDotProductsTiled(Groups, NumAs, NumBs, //localNumElements := (VectorSize div 4) * 4; //MissedElements := VectorSize - localNumElements; + {$IFDEF AVXANY} MissedElements := VectorSize and 3; localNumElements := VectorSize xor MissedElements; + {$ENDIF} MaxTileA := (NumAs div TileSizeA) - 1; MaxTileB := (NumBs div TileSizeB) - 1; for TileBCnt := 0 to MaxTileB do @@ -6000,31 +6129,26 @@ procedure TNNetVolume.AddArea(DestX, DestY, OriginX, OriginY, LenX, end; end; +// ########################################### +// #### local definitions for AVX determiniation + +var locAVX : boolean = False; + locAVX2 : boolean = False; + locAVX512 : boolean = False; + function TNNetVolume.HasAVX: boolean; begin - {$IFDEF AVXANY} - Result := true; - {$ELSE} - Result := false; - {$ENDIF} + Result := locAVX; end; function TNNetVolume.HasAVX2: boolean; begin - {$IFDEF AVX2} - Result := true; - {$ELSE} - Result := false; - {$ENDIF} + Result := locAVX2; end; function TNNetVolume.HasAVX512: boolean; begin - {$IFDEF AVX512} - Result := true; - {$ELSE} - Result := false; - {$ENDIF} + Result := locAVX512; end; function TNNetVolume.PearsonCorrelation(Y: TNNetVolume): TNeuralFloat; @@ -9230,9 +9354,25 @@ class function TVolume.DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: i ): Single; var I: integer; + {$IFDEF FPC} BasePos, vHigh: integer; AddrA, AddrB: TNeuralFloatPtr; + {$ENDIF} begin + {$IFNDEF FPC} + if false //locAVX and (NumElements > 4) + then + Result := AVXDotProd(PSingle(PtrA), PSingle(PtrB), NumElements) + else + begin + Result := 0; + for i := 0 to NumElements - 1 do + Result := Result + PtrA^[i]*PtrB^[i]; + end; + 
+ exit; + + {$ELSE} Result := 0; BasePos := 0; vHigh := NumElements - 1; @@ -9276,6 +9416,7 @@ class function TVolume.DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: i end; //WriteLn('Hello: ', Result); //ReadLn(); + {$ENDIF} end; class function TVolume.Product(PtrA: TNeuralFloatArrPtr; @@ -9323,4 +9464,13 @@ procedure TNNetVolumePairList.SetItem(Index: Integer; AObject: TNNetVolumePair); end; {$ENDIF} + +// ########################################### +// #### Initialize cpu set variables +// ########################################### + +initialization + locAVX := IsAVXPresent; + locAVX2 := IsFMAPresent; + locAVX512 := IsAVX512Present; end. diff --git a/neural/neuralvolumev.pas b/neural/neuralvolumev.pas index 20341090..b77102d7 100644 --- a/neural/neuralvolumev.pas +++ b/neural/neuralvolumev.pas @@ -25,7 +25,7 @@ interface uses Classes, SysUtils, ExtCtrls, Graphics, neuralvolume, - {$IFDEF FPC}LCLType, FPImage {$ELSE}Winapi.Windows{$ENDIF} ; + {$IFDEF FPC}LCLType, FPImage {$ELSE}Windows{$ENDIF} ; /// saves a bitmap into a file from a handle HWND procedure SaveHandleToBitmap(OutputFileName: string; hWnd: HWND); @@ -47,7 +47,7 @@ procedure LoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume); {$ENDIF} implementation -{$IFDEF FPC}uses LCLIntf;{$ENDIF} +uses {$IFDEF FPC}LCLIntf,{$ENDIF}Math; procedure SaveHandleToBitmap(OutputFileName: string; hWnd: HWND); {$IFDEF FPC} From 09e7790e453d9e2375414ceb8f95f9acf4c44ccf Mon Sep 17 00:00:00 2001 From: mikerabat Date: Mon, 1 Aug 2022 13:29:44 +0200 Subject: [PATCH 02/13] missing Delphi avx --- neural/neuralvolume.pas | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neural/neuralvolume.pas b/neural/neuralvolume.pas index c52ba1d9..9c209c65 100644 --- a/neural/neuralvolume.pas +++ b/neural/neuralvolume.pas @@ -9360,7 +9360,7 @@ class function TVolume.DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: i {$ENDIF} begin {$IFNDEF FPC} - if false //locAVX and (NumElements > 4) + if locAVX 
and (NumElements > 4) then Result := AVXDotProd(PSingle(PtrA), PSingle(PtrB), NumElements) else From 4420615bb4693ccc368c58463df4bfb5f8df1951 Mon Sep 17 00:00:00 2001 From: mikerabat Date: Wed, 31 May 2023 16:04:14 +0200 Subject: [PATCH 03/13] Check for missing opencl dll --- neural/neuralopencl.pas | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/neural/neuralopencl.pas b/neural/neuralopencl.pas index 71c88250..2d252c64 100644 --- a/neural/neuralopencl.pas +++ b/neural/neuralopencl.pas @@ -907,6 +907,11 @@ procedure TEasyOpenCL.LoadPlatforms(); buf: TNeuralStrBuffer; bufwritten: csize_t; begin + if @clGetPlatformIDs = nil then + begin + FErrorProc('Error: no opencl dll found'); + exit; + end; bufwritten := 0; err := clGetPlatformIDs(0, nil, @local_platforms); if (err <> CL_SUCCESS) then From 1b2556bbff22f86802211fee72b359011c50970b Mon Sep 17 00:00:00 2001 From: mikerabat Date: Wed, 16 Aug 2023 14:03:16 +0200 Subject: [PATCH 04/13] Introduced loadFromStream --- neural/neuralnetwork.pas | 59 ++++++++++++++++++++++++++++++++++++---- 1 file changed, 53 insertions(+), 6 deletions(-) diff --git a/neural/neuralnetwork.pas b/neural/neuralnetwork.pas index dbb81a69..6a0d3d22 100644 --- a/neural/neuralnetwork.pas +++ b/neural/neuralnetwork.pas @@ -114,13 +114,27 @@ TNNetNeuron = class (TMObject) procedure ClearDelta; {$IFDEF Release} inline; {$ENDIF} // Initializers + + // Weight Initializer - Uniform Distribution. procedure InitUniform(Value: TNeuralFloat = 1); + // Weight Initializer - Gaussian Distribution. 
procedure InitGaussian(Value: TNeuralFloat = 1); + // Weight Initializer - LeCun 98, Efficient Backprop + // http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf procedure InitLeCunUniform(Value: TNeuralFloat = 1); + // Weight Initializer - This implementation is inspired on: + // Delving Deep into Rectifiers: Surpassing Human-Level Performance on ImageNet Classification + // Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun + // https://arxiv.org/abs/1502.01852 + // He initializations are also called Kaiming initializations. procedure InitHeUniform(Value: TNeuralFloat = 1); - procedure InitHeGaussian(Value: TNeuralFloat = 1); + // Weight Initializer - same as InitHeUniform for depthwise convolutions. procedure InitHeUniformDepthwise(Value: TNeuralFloat = 1); + // Weight Initializer - same as InitHeUniform with gaussian distribution. + procedure InitHeGaussian(Value: TNeuralFloat = 1); + // Weight Initializer - same as InitHeGaussian for depthwise convolutions. procedure InitHeGaussianDepthwise(Value: TNeuralFloat = 1); + // Weight Initializer for SELU activation function. procedure InitSELU(Value: TNeuralFloat = 1); property Weights: TNNetVolume read FWeights; @@ -139,9 +153,13 @@ TNNetNeuronList = class (TNNetList) public property Items[Index: Integer]: TNNetNeuron read GetItem write SetItem; default; {$ENDIF} + // Creates the list with ElementCount elements. constructor CreateWithElements(ElementCount: integer); + // Returns the maximum weight value. function GetMaxWeight(): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} + // Returns the maximum absolute weight value. function GetMaxAbsWeight(): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} + // Returns the minimum weight value. 
function GetMinWeight(): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} procedure InitForDebug(); end; @@ -260,15 +278,32 @@ TNNetLayer = class(TMObject) procedure ResetBackpropCallCurrCnt(); {$IFDEF Release} inline; {$ENDIF} // Initializers + + // Weight Initializer - Uniform Distribution. function InitUniform(Value: TNeuralFloat = 1): TNNetLayer; + // Weight Initializer - LeCun 98, Efficient Backprop + // http://yann.lecun.com/exdb/publis/pdf/lecun-98b.pdf function InitLeCunUniform(Value: TNeuralFloat = 1): TNNetLayer; + // Weight Initializer - This implementation is inspired on: + // Delving Deep into Rectifiers: Surpassing Human-Level Performance on ImageNet Classification + // Kaiming He, Xiangyu Zhang, Shaoqing Ren, Jian Sun + // https://arxiv.org/abs/1502.01852 // He initializations are also called Kaiming initializations. function InitHeUniform(Value: TNeuralFloat = 1): TNNetLayer; + // Weight Initializer - same as InitHeUniform for depthwise convolutions. function InitHeUniformDepthwise(Value: TNeuralFloat = 1): TNNetLayer; + // Weight Initializer - same as InitHeUniform with gaussian distribution. function InitHeGaussian(Value: TNeuralFloat = 0.5): TNNetLayer; + // Weight Initializer - same as InitHeGaussian for depthwise convolutions. function InitHeGaussianDepthwise(Value: TNeuralFloat = 0.5): TNNetLayer; // Glorot Bengio initializations are also called Xavier initializations. + // This implementation is inspired on: + // Understanding the difficulty of training deep feedforward neural networks + // Xavier Glorot, Yoshua Bengio ; Proceedings of the Thirteenth International + // Conference on Artificial Intelligence and Statistics, PMLR 9:249-256, 2010. + // http://proceedings.mlr.press/v9/glorot10a.html function InitGlorotBengioUniform(Value: TNeuralFloat = 1): TNNetLayer; + // Weight Initializer for SELU activation function. 
function InitSELU(Value: TNeuralFloat = 1): TNNetLayer; procedure InitDefault(); virtual; @@ -1499,6 +1534,7 @@ TNNet = class(TMObject) procedure LoadFromString(strData: string); // Load both architecture and weights from file (complete saving). procedure LoadFromFile(filename: string); + procedure LoadFromStream(stream : TStream); // Returns a cloned neural network function Clone(): TNNet; @@ -11629,11 +11665,12 @@ procedure TNNet.SaveToFile(filename: string); S.Free; end; -procedure TNNet.LoadFromString(strData: string); +procedure TNNet.LoadFromStream(stream: TStream); var S: TStringList; begin - S := CreateTokenizedStringList(strData, '>'); + S := CreateTokenizedStringList('>'); + S.LoadFromStream(stream); if (S.Count = 2) then begin @@ -11651,12 +11688,11 @@ procedure TNNet.LoadFromString(strData: string); S.Free; end; -procedure TNNet.LoadFromFile(filename: string); +procedure TNNet.LoadFromString(strData: string); var S: TStringList; begin - S := CreateTokenizedStringList('>'); - S.LoadFromFile(filename); + S := CreateTokenizedStringList(strData, '>'); if (S.Count = 2) then begin @@ -11674,6 +11710,17 @@ procedure TNNet.LoadFromFile(filename: string); S.Free; end; +procedure TNNet.LoadFromFile(filename: string); +var fs : TFileStream; +begin + fs := TFileStream.Create(filename, fmOpenRead or fmShareDenyWrite); + try + LoadFromStream(fs); + finally + fs.Free; + end; +end; + function TNNet.Clone(): TNNet; //var NNData: String; //begin From 543969e50e5c9e5f66fb22db9abaaa581525b5a2 Mon Sep 17 00:00:00 2001 From: mikerabat Date: Mon, 25 Sep 2023 16:23:13 +0200 Subject: [PATCH 05/13] Allow more than 64cores by utilizing the group affinity api -> an azure VM with 120 cores could only access 60 cores --- neural/neuralthread.pas | 40 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/neural/neuralthread.pas b/neural/neuralthread.pas index 59761766..bdbbd77f 100644 --- a/neural/neuralthread.pas +++ b/neural/neuralthread.pas @@ 
-175,11 +175,18 @@ procedure CreateNeuralThreadListIfRequired(); function NeuralDefaultThreadCount: integer; begin + {$IFDEF MSWINDOWS} + // for systems with more than 64 cores thes System.CPUCount only returns 64 at max + // -> we need to group them together so count the cpu's differntly + // https://learn.microsoft.com/en-us/windows/win32/procthread/processor-groups + Result := GetActiveProcessorCount(ALL_PROCESSOR_GROUPS); // get all of all groups + {$ELSE} {$IFDEF FPC} Result := GetSystemThreadCount; {$ELSE} Result := System.CPUCount; {$ENDIF} + {$ENDIF} end; {$IFDEF FPC} @@ -330,7 +337,40 @@ procedure TNeuralThreadList.WaitForProc(); { TNeuralThread } procedure TNeuralThread.Execute; +{$IFDEF MSWINDOWS} + +var i : integer; + numGroups : integer; + maxIdxInGroup : integer; + ga : TGroupAffinity; +{$ENDIF} begin + {$IFDEF MSWINDOWS} + // set group affinity + maxIdxInGroup := -1; + numGroups := GetActiveProcessorGroupCount; + + // set affinity to physical cpu's - leave it as is otherwise + for i := 0 to numGroups - 1 do + begin + maxIdxInGroup := maxIdxInGroup + Integer(GetActiveProcessorCount(i)); + if maxIdxInGroup >= FIndex then + begin + FillChar( ga, sizeof(ga), 0); + GetThreadGroupAffinity(GetCurrentThread, ga); + ga.Group := Word(i); + if not SetThreadGroupAffinity( Handle, ga, nil) then + RaiseLastOSError; + + break; + end; + end; + {$ENDIF} + + + + // ########################################### + // #### do the work while (not Terminated) do begin FNeuronStart.WaitFor(INFINITE); From 5504e75026ded88661b91c016ab8ea21a3dd882b Mon Sep 17 00:00:00 2001 From: mikerabat Date: Wed, 27 Sep 2023 11:15:31 +0200 Subject: [PATCH 06/13] Merged changes from "master". 
--- README.md | 47 +- .../ColorectalImageClassification.pas | 99 ++++ examples/DelphiTemplate/Unit1.pas | 184 ++++++ .../MalariaImageClassification.pas | 97 +++ examples/OnlyTwoLayers/OnlyTwoLayersAbs.pas | 92 +++ examples/SelfTest/SelfTest.dpr | 40 ++ .../SimplePlantLeafDisease.pas | 8 +- .../uvisualautoencodertinyimagenet.pas | 99 +--- examples/VisualGAN/uvisualgan.pas | 2 +- neural/neuraldatasets.pas | 322 +++++++--- neural/neuraldatasetsv.pas | 22 +- neural/neuralfit.pas | 69 ++- neural/neuralnetwork.pas | 475 +++++++++++++-- neural/neuralopenclv.pas | 2 +- neural/neuralthread.pas | 38 +- neural/neuralvolume.pas | 551 +++++++++++++++++- neural/neuralvolumev.pas | 15 +- 17 files changed, 1908 insertions(+), 254 deletions(-) create mode 100644 examples/ColorectalImageClassification/ColorectalImageClassification.pas create mode 100644 examples/DelphiTemplate/Unit1.pas create mode 100644 examples/MalariaImageClassification/MalariaImageClassification.pas create mode 100644 examples/OnlyTwoLayers/OnlyTwoLayersAbs.pas create mode 100644 examples/SelfTest/SelfTest.dpr diff --git a/README.md b/README.md index b3dd31c3..25cdba8a 100644 --- a/README.md +++ b/README.md @@ -3,7 +3,7 @@ CAI NEURAL API is a pascal based deep learning neural network API optimized for AVX, AVX2 and AVX512 instruction sets plus OpenCL capable devices including AMD, Intel and NVIDIA. This API has been tested under Windows and Linux. -This project is a subproject from a bigger and older project called [CAI](https://sourceforge.net/projects/cai/) and is sister to Keras based [K-CAI NEURAL API](https://github.com/joaopauloschuler/k-neural-api). +This project is a subproject from a bigger and older project called [CAI](https://sourceforge.net/projects/cai/) and is sister to Keras based [K-CAI NEURAL API](https://github.com/joaopauloschuler/k-neural-api). 
You can find trained neural network models in the [pre-trained-neural-api-networks](https://github.com/joaopauloschuler/pre-trained-neural-api-networks/) repository. ## Why Pascal? * Compiled pascal code is super fast! This API can outperform some major APIs in some architectures. @@ -77,6 +77,29 @@ These examples train a neural network to classify images in classes such as: ima * [Many neural network architectures for CIFAR-10 image classification](https://sourceforge.net/p/cai/svncode/HEAD/tree/trunk/lazarus/experiments/testcnnalgo/testcnnalgo.lpr) * [MNIST](https://github.com/joaopauloschuler/neural-api/tree/master/examples/SimpleMNist), [Fashion MNIST](https://github.com/joaopauloschuler/neural-api/tree/master/examples/SimpleFashionMNIST) and [CIFAR-100](https://github.com/joaopauloschuler/neural-api/tree/master/examples/Cifar100CaiDenseNet) +You can save and load trained models (neural networks) with `TNNet.SaveToFile` and `TNNet.LoadFromFile`. The file format is portable meaning that you can train on CPU and run on GPU or train in AMD and run on ARM as examples. The following code shows a simple example for image classification loading a [pre-trained](https://github.com/joaopauloschuler/pre-trained-neural-api-networks/) model: +``` + procedure ClassifyOneImageSimple; + var + NN: TNNet; + ImageFileName: string; + NeuralFit: TNeuralImageFit; + begin + WriteLn('Loading Neural Network...'); + NN := TNNet.Create; + NN.LoadFromFile('SimplePlantLeafDisease-20230720.nn'); + NeuralFit := TNeuralImageFit.Create; + ImageFileName := 'plant/Apple___Black_rot/image (1).JPG'; + WriteLn('Processing image: ', ImageFileName); + WriteLn( + 'The class of the image is: ', + NeuralFit.ClassifyImageFromFile(NN, ImageFileName) + ); + NeuralFit.Free; + NN.Free; + end; +``` + ### Youtube Videos There are some available videos: * [Increasing Image Resolution with Neural Networks](https://www.youtube.com/watch?v=jdFixaZ2P4w) @@ -88,15 +111,15 @@ it's mentioned. 
### Advanced Examples Although these examples require deeper understanding about neural networks, they are very interesting: +* [Identity Shortcut Connection](https://github.com/joaopauloschuler/neural-api/tree/master/examples/IdentityShortcutConnection) - ResNet building block [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/joaopauloschuler/neural-api/blob/master/examples/IdentityShortcutConnection/IdentityShortcutConnection.ipynb) +* [ResNet-20](https://github.com/joaopauloschuler/neural-api/blob/master/examples/ResNet/) - includes a [web server](examples/ResNet/server) example * [DenseNetBC L40](https://github.com/joaopauloschuler/neural-api/tree/master/examples/DenseNetBCL40) -* [ResNet-20](https://github.com/joaopauloschuler/neural-api/blob/master/examples/ResNet/ResNet20.lpr) * [Separable Convolutions](https://github.com/joaopauloschuler/neural-api/tree/master/examples/SeparableConvolution) - MobileNet building block -* [Identity Shortcut Connection](https://github.com/joaopauloschuler/neural-api/tree/master/examples/IdentityShortcutConnection) - ResNet building block [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/joaopauloschuler/neural-api/blob/master/examples/IdentityShortcutConnection/IdentityShortcutConnection.ipynb) * [Gradient Ascent](https://github.com/joaopauloschuler/neural-api/tree/master/examples/GradientAscent) - Visualizing patterns from inner neurons in image classification

* [Artificial Art](https://github.com/joaopauloschuler/neural-api/tree/master/examples/VisualGAN) - Let a neural network produce art via a generative adversarial network

* [Super Resolution](https://github.com/joaopauloschuler/neural-api/tree/master/examples/SuperResolution) - A neural network learns how to increase image resolution

* [CIFAR-10 Resized](https://github.com/joaopauloschuler/neural-api/tree/master/examples/Cifar10Resize) - A program that resizes CIFAR-10 and CIFAR-100 images to 64x64 and 128x128 pixels.

-* [Autoencoder](https://github.com/joaopauloschuler/neural-api/tree/master/examples/VisualAutoencoder) - Shows an autoencoder built with hyperbolic tangents and trained with [Tiny ImageNet 200](https://tiny-imagenet.herokuapp.com/).

+* [Autoencoder](https://github.com/joaopauloschuler/neural-api/tree/master/examples/VisualAutoencoder) - Shows an autoencoder built with hyperbolic tangents and trained with [Tiny ImageNet 200](https://paperswithcode.com/dataset/tiny-imagenet).

There are also some [older code examples](https://sourceforge.net/p/cai/svncode/HEAD/tree/trunk/lazarus/experiments/) that you can look at. @@ -259,6 +282,7 @@ This API is really big. The following list gives a general idea about this API b * `TNNetSplitChannels` (input: 1D, 2D or 3D / output: 1D, 2D or 3D). Splits (or copies) channels from the input. This layer allows getting a subset of the input channels. * `TNNetSplitChannelEvery` (input: 1D, 2D or 3D / output: 1D, 2D or 3D). Splits (or copies) channels from the input every few channels. As example, this layer allows getting half (GetChannelEvery=2) or a third (GetChannelEvery=3) of the input channels. * `TNNetSum` (input/output: 1D, 2D or 3D). Sums outputs from previous layers allowing ResNet style networks. +* `TNNetUpsample` (input/output: 3D). Converts channels (depth) into spatial data. For example, a 128x128x256 activation map will be converted to 256x256x64. The number of channels is always divided by 4 while the resolution increases. ### Layers with Activation Functions and no Trainable Parameter * `TNNetReLU` (input/output: 1D, 2D or 3D). @@ -272,6 +296,7 @@ This API is really big. The following list gives a general idea about this API b * `TNNetSoftMax` (input/output: 1D, 2D or 3D). * `TNNetSwish` (input/output: 1D, 2D or 3D). * `TNNetSwish6` (input/output: 1D, 2D or 3D). +* `TNNetHardSwish` (input/output: 1D, 2D or 3D). * `TNNetHyperbolicTangent` (input/output: 1D, 2D or 3D). * `TNNetPower` (input/output: 1D, 2D or 3D). 
@@ -344,7 +369,8 @@ NEURAL | Keras | PyTorch `TNNetSplitChannelEvery` | | `TNNetSum` | `layers.Add` | `torch.add` `TNNetCellMulByCell` | `layers.Multiply` | -`TNNetChannelMulByLayer` | `layers.Multiply` | +`TNNetChannelMulByLayer` | `layers.Multiply` | +`TNNetUpsample` | `tf.nn.depth_to_space` | ## Adding Layers @@ -445,7 +471,9 @@ CreateVolumesFromImagesFromFolder The example above shows how to load the dataset with 90% loaded into training and 5% loaded for each validation and testing. Images are being resized to 128x128. Source code examples: -* [Simple Plant Leaf Disease Image Classifier for the PlantVillage Dataset](https://github.com/joaopauloschuler/neural-api/tree/master/examples/SimplePlantLeafDisease) +* [Simple Plant Leaf Disease Image Classifier for the PlantVillage Dataset](https://github.com/joaopauloschuler/neural-api/tree/master/examples/SimplePlantLeafDisease) [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/joaopauloschuler/neural-api/blob/master/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.ipynb) +* [Colorectal Cancer Dataset Image Classifier](https://github.com/joaopauloschuler/neural-api/tree/master/examples/ColorectalImageClassification) [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/joaopauloschuler/neural-api/blob/master/examples/ColorectalImageClassification/ColorectalCancerClassification.ipynb) +* [Malaria Dataset Image Classifier](https://github.com/joaopauloschuler/neural-api/tree/master/examples/MalariaImageClassification) [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/joaopauloschuler/neural-api/blob/master/examples/MalariaImageClassification/MalariaClassification.ipynb) * [Tiny ImageNet 200](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleTinyImageNet) #### Is your Dataset too Big for RAM? 
You should use TNeuralImageLoadingFit. @@ -617,21 +645,20 @@ When you are done, you should call: FProcs.Free; ``` -## Scientific Publications from the Author +## Publications from the Author In the case that you would like to know more about what the CAI's author is working at, here we go. Optimizing the first layers of a convolutional neural network: - [Color-aware two-branch DCNN for efficient plant disease classification](https://www.researchgate.net/publication/361511874_Color-Aware_Two-Branch_DCNN_for_Efficient_Plant_Disease_Classification). - [Reliable Deep Learning Plant Leaf Disease Classification Based on Light-Chroma Separated Branches.](https://www.researchgate.net/publication/355215213_Reliable_Deep_Learning_Plant_Leaf_Disease_Classification_Based_on_Light-Chroma_Separated_Branches) -- [Optimizing CNNs first layer with respect to color encoding.](https://www.researchgate.net/publication/357204289_Optimizing_CNNs_first_layer_with_respect_to_color_encoding) Optimizing deep layers of a convolutional neural network: - [Grouped Pointwise Convolutions Reduce Parameters in Convolutional Neural Networks.](https://www.researchgate.net/publication/360226228_Grouped_Pointwise_Convolutions_Reduce_Parameters_in_Convolutional_Neural_Networks) -- [Grouped Pointwise Convolutions Significantly Reduces Parameters in EfficientNet.](https://www.researchgate.net/publication/355214501_Grouped_Pointwise_Convolutions_Significantly_Reduces_Parameters_in_EfficientNet) +- [An Enhanced Scheme for Reducing the Complexity of Pointwise Convolutions in CNNs for Image Classification Based on Interleaved Grouped Filters without Divisibility Constraints.](https://www.researchgate.net/publication/363413038_An_Enhanced_Scheme_for_Reducing_the_Complexity_of_Pointwise_Convolutions_in_CNNs_for_Image_Classification_Based_on_Interleaved_Grouped_Filters_without_Divisibility_Constraints) Publicações em Português: - [A Evolução dos Algoritmos 
Mentais.](https://www.researchgate.net/publication/357204541_A_Evolucao_dos_Algoritmos_Mentais) -- [Da Física à Inteligência Extrassomática.](http://schulers.com/jpss/estudos/schuler_inteligencia_artificial.pdf) +- [Da Física à Inteligência Extrassomática.](https://www.researchgate.net/publication/365687206_DA_FISICA_A_INTELIGENCIA_EXTRASSOMATICA) - [Inteligência Artificial Popperiana.](https://www.researchgate.net/publication/357164807_Inteligencia_Artificial_Popperiana) - [Operações Lógicas Quânticas e Colorabilidade de Grafos.](https://www.researchgate.net/publication/357205247_Operacoes_Logicas_Quanticas_e_Colorabilidade_de_Grafos) diff --git a/examples/ColorectalImageClassification/ColorectalImageClassification.pas b/examples/ColorectalImageClassification/ColorectalImageClassification.pas new file mode 100644 index 00000000..5689fc90 --- /dev/null +++ b/examples/ColorectalImageClassification/ColorectalImageClassification.pas @@ -0,0 +1,99 @@ +///This file has an implementation to classify +// the Colorectal Cancer Dataset: +// https://zenodo.org/record/53169/ +// https://zenodo.org/record/53169/files/Kather_texture_2016_image_tiles_5000.zip?download=1 +// https://www.tensorflow.org/datasets/catalog/colorectal_histology + +// Change ProportionToLoad to a smaller number if you don't have available 4GB of RAM. + +program ColorectalImageClassification; +(* + Coded by Joao Paulo Schwarz Schuler. 
+ https://github.com/joaopauloschuler/neural-api +*) +{$mode objfpc}{$H+} + +uses {$IFDEF UNIX} {$IFDEF UseCThreads} + cthreads, {$ENDIF} {$ENDIF} + Classes, SysUtils, CustApp, neuralnetwork, neuralvolume, Math, neuraldatasets, + neuralfit; + +type + TTestCNNAlgo = class(TCustomApplication) + protected + procedure DoRun; override; + end; + + procedure TTestCNNAlgo.DoRun; + var + NN: TNNet; + NeuralFit: TNeuralImageFit; + ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes: TNNetVolumeList; + ProportionToLoad: Single; + begin + WriteLn('Creating Neural Network...'); + NN := TNNet.Create(); + NN.AddLayer([ + TNNetInput.Create(128, 128, 3), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}5, {Padding=}4, {Stride=}2), + TNNetMaxPool.Create(2), + TNNetMovingStdNormalization.Create(), + TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1), + TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1), + TNNetMaxPool.Create(2), + TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1), + TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1), + TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}2), + TNNetDropout.Create(0.5), + TNNetMaxPool.Create(2), + TNNetFullConnectLinear.Create(8), + TNNetSoftMax.Create() + ]); + NN.DebugStructure(); + // change ProportionToLoad to a smaller number if you don't have available 8GB of RAM. 
+ ProportionToLoad := 1; + WriteLn('Loading ', Round(ProportionToLoad*100), '% of the Plant leave disease dataset into memory.'); + CreateVolumesFromImagesFromFolder + ( + ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, + {FolderName=}'Kather_texture_2016_image_tiles_5000', {pImageSubFolder=}'', + {color_encoding=}0{RGB}, + {TrainingProp=}0.9*ProportionToLoad, + {ValidationProp=}0.05*ProportionToLoad, + {TestProp=}0.05*ProportionToLoad, + {NewSizeX=}128, {NewSizeY=}128 + ); + + WriteLn + ( + 'Training Images:', ImgTrainingVolumes.Count, + ' Validation Images:', ImgValidationVolumes.Count, + ' Test Images:', ImgTestVolumes.Count + ); + + NeuralFit := TNeuralImageFit.Create; + NeuralFit.FileNameBase := 'Colorectal'; + NeuralFit.InitialLearningRate := 0.001; + NeuralFit.LearningRateDecay := 0.01; + NeuralFit.CyclicalLearningRateLen := 10; + NeuralFit.StaircaseEpochs := 10; + NeuralFit.Inertia := 0.9; + NeuralFit.L2Decay := 0.00001; + NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}8, {batchsize=}64, {epochs=}250); + NeuralFit.Free; + + NN.Free; + ImgTestVolumes.Free; + ImgValidationVolumes.Free; + ImgTrainingVolumes.Free; + Terminate; + end; + +var + Application: TTestCNNAlgo; +begin + Application := TTestCNNAlgo.Create(nil); + Application.Title:='Colorectal Cancer Image Classification'; + Application.Run; + Application.Free; +end. diff --git a/examples/DelphiTemplate/Unit1.pas b/examples/DelphiTemplate/Unit1.pas new file mode 100644 index 00000000..eaf67a35 --- /dev/null +++ b/examples/DelphiTemplate/Unit1.pas @@ -0,0 +1,184 @@ +unit Unit1; + +interface + +uses + System.SysUtils, System.Types, System.UITypes, System.Classes, System.Variants, + FMX.Types, FMX.Controls, FMX.Forms, FMX.Graphics, FMX.Dialogs, + FMX.Controls.Presentation, FMX.StdCtrls, + // Neural specifc files. 
+ neuralnetwork, neuralvolume, neuraldatasets, neuralfit, neuralthread; + +// In Delphi, in project options: +// * At compiler, search path (-U), you'll add the "neural" folder: ..\..\neural\ +// * Still at the compiler, set the final output directory (-E) to: ..\..\bin\x86_64-win64\bin\ +// * In "generate console application", set it to true. + +// In your "uses" section, include: +// neuralnetwork, neuralvolume, neuraldatasets, neuralfit, neuralthread; + +type + TForm1 = class(TForm) + Button1: TButton; + Button2: TButton; + procedure Button1Click(Sender: TObject); + procedure Button2Click(Sender: TObject); + private + { Private declarations } + public + { Public declarations } + end; + +var + Form1: TForm1; + +implementation + + +type + // Define the input and output types for training data + TBackInput = array[0..3] of array[0..1] of TNeuralFloat; // Input data for OR operation + TBackOutput = array[0..3] of array[0..0] of TNeuralFloat; // Expected output for OR operation + +const + cs_false = 0.1; // Encoding for "false" value + cs_true = 0.8; // Encoding for "true" value + cs_threshold = (cs_false + cs_true) / 2; // Threshold for neuron activation + +const + cs_inputs : TBackInput = + ( + // Input data for OR operation + (cs_false, cs_false), + (cs_false, cs_true), + (cs_true, cs_false), + (cs_true, cs_true) + ); + +const + cs_outputs : TBackOutput = + ( + // Expected outputs for OR operation + (cs_false), + (cs_true), + (cs_true), + (cs_true) + ); + + procedure RunSimpleLearning(); + var + NN: TNNet; + EpochCnt: integer; + Cnt: integer; + pOutPut: TNNetVolume; + vInputs: TBackInput; + vOutput: TBackOutput; + begin + NN := TNNet.Create(); + + // Create the neural network layers + NN.AddLayer(TNNetInput.Create(2)); // Input layer with 2 neurons + NN.AddLayer(TNNetFullConnectLinear.Create(1)); // Single neuron layer connected to both inputs from the previous layer. 
+ + NN.SetLearningRate(0.01, 0.9); // Set the learning rate and momentum + + vInputs := cs_inputs; // Assign the input data + vOutput := cs_outputs; // Assign the expected output data + pOutPut := TNNetVolume.Create(1, 1, 1, 1); // Create a volume to hold the output + + WriteLn('Value encoding FALSE is: ', cs_false:4:2); // Display the encoding for "false" + WriteLn('Value encoding TRUE is: ', cs_true:4:2); // Display the encoding for "true" + WriteLn('Threshold is: ', cs_threshold:4:2); // Display the threshold value + WriteLn; + + for EpochCnt := 1 to 1200 do + begin + for Cnt := Low(cs_inputs) to High(cs_inputs) do + begin + // Feed forward and backpropagation + NN.Compute(vInputs[Cnt]); // Perform feedforward computation + NN.GetOutput(pOutPut); // Get the output of the network + NN.Backpropagate(vOutput[Cnt]); // Perform backpropagation to adjust weights + + if EpochCnt mod 100 = 0 then + WriteLn( + EpochCnt:7, 'x', Cnt, + ' Inputs: ', cs_inputs[Cnt][0]:3:1,', ' ,cs_inputs[Cnt][1]:3:1, + ' Output:', pOutPut.Raw[0]:5:2,' ', + ' - Training/Desired Output: ', vOutput[cnt][0]:5:2,' ' + ); + end; + + if EpochCnt mod 100 = 0 then + begin + WriteLn(''); + end; + + end; + + NN.DebugWeights(); // Display the final weights of the network + + pOutPut.Free; // Free the memory allocated for output + NN.Free; // Free the memory allocated for the network + + end; + + procedure RunNeuralNetwork; + var + NN: TNNet; + NeuralFit: TNeuralImageFit; + ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes: TNNetVolumeList; + begin + if not CheckCIFARFile() then + begin + exit; + end; + WriteLn('Creating Neural Network...'); + NN := TNNet.Create(); + NN.AddLayer([ + TNNetInput.Create(32, 32, 3), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}5, {Padding=}2, {Stride=}1, {SuppressBias=}1), + TNNetMaxPool.Create(4), + TNNetMovingStdNormalization.Create(), + TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + 
TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetDropout.Create(0.5), + TNNetMaxPool.Create(2), + TNNetFullConnectLinear.Create(10), + TNNetSoftMax.Create() + ]); + NN.DebugStructure(); + CreateCifar10Volumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes); + + NeuralFit := TNeuralImageFit.Create; + NeuralFit.FileNameBase := 'SimpleImageClassifier-'+IntToStr(GetProcessId()); + NeuralFit.InitialLearningRate := 0.001; + NeuralFit.LearningRateDecay := 0.01; + NeuralFit.StaircaseEpochs := 10; + NeuralFit.Inertia := 0.9; + NeuralFit.L2Decay := 0; + NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); + NeuralFit.Free; + + NN.Free; + ImgTestVolumes.Free; + ImgValidationVolumes.Free; + ImgTrainingVolumes.Free; + end; + + +{$R *.fmx} + +procedure TForm1.Button1Click(Sender: TObject); +begin + RunNeuralNetwork; +end; + +procedure TForm1.Button2Click(Sender: TObject); +begin + RunSimpleLearning; +end; + +end. diff --git a/examples/MalariaImageClassification/MalariaImageClassification.pas b/examples/MalariaImageClassification/MalariaImageClassification.pas new file mode 100644 index 00000000..b44f46d5 --- /dev/null +++ b/examples/MalariaImageClassification/MalariaImageClassification.pas @@ -0,0 +1,97 @@ +///This file has an implementation to classify +// malaria infected cell images. +// +// You can get the dataset at +// https://data.lhncbc.nlm.nih.gov/public/Malaria/cell_images.zip +// https://www.tensorflow.org/datasets/catalog/malaria + +program MalariaImageClassification; +(* + Coded by Joao Paulo Schwarz Schuler. 
+ https://github.com/joaopauloschuler/neural-api +*) +{$mode objfpc}{$H+} + +uses {$IFDEF UNIX} {$IFDEF UseCThreads} + cthreads, {$ENDIF} {$ENDIF} + Classes, SysUtils, CustApp, neuralnetwork, neuralvolume, Math, neuraldatasets, + neuralfit; + +type + TTestCNNAlgo = class(TCustomApplication) + protected + procedure DoRun; override; + end; + + procedure TTestCNNAlgo.DoRun; + var + NN: TNNet; + NeuralFit: TNeuralImageFit; + ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes: TNNetVolumeList; + ProportionToLoad: Single; + begin + WriteLn('Creating Neural Network...'); + NN := TNNet.Create(); + NN.AddLayer([ + TNNetInput.Create(64, 64, 3), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}5, {Padding=}4, {Stride=}1), + TNNetMaxPool.Create(2), + TNNetMovingStdNormalization.Create(), + TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1), + TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1), + TNNetMaxPool.Create(2), + TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1), + TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1), + TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}2), + TNNetDropout.Create(0.5), + TNNetMaxPool.Create(2), + TNNetFullConnectLinear.Create(2), + TNNetSoftMax.Create() + ]); + NN.DebugStructure(); + // change ProportionToLoad to a smaller number if you don't have available 4GB of RAM. 
+ ProportionToLoad := 1; + WriteLn('Loading ', Round(ProportionToLoad*100), '% of the malaria dataset into memory.'); + CreateVolumesFromImagesFromFolder + ( + ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, + {FolderName=}'cell_images', {pImageSubFolder=}'', + {color_encoding=}0{RGB}, + {TrainingProp=}0.9*ProportionToLoad, + {ValidationProp=}0.05*ProportionToLoad, + {TestProp=}0.05*ProportionToLoad, + {NewSizeX=}64, {NewSizeY=}64 + ); + + WriteLn + ( + 'Training Images:', ImgTrainingVolumes.Count, + ' Validation Images:', ImgValidationVolumes.Count, + ' Test Images:', ImgTestVolumes.Count + ); + + NeuralFit := TNeuralImageFit.Create; + NeuralFit.FileNameBase := 'Malaria'; + NeuralFit.InitialLearningRate := 0.001; + NeuralFit.LearningRateDecay := 0.01; + NeuralFit.StaircaseEpochs := 10; + NeuralFit.Inertia := 0.9; + NeuralFit.L2Decay := 0.00001; + NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}2, {batchsize=}64, {epochs=}50); + NeuralFit.Free; + + NN.Free; + ImgTestVolumes.Free; + ImgValidationVolumes.Free; + ImgTrainingVolumes.Free; + Terminate; + end; + +var + Application: TTestCNNAlgo; +begin + Application := TTestCNNAlgo.Create(nil); + Application.Title:='Malaria Cell Infection Classification'; + Application.Run; + Application.Free; +end. diff --git a/examples/OnlyTwoLayers/OnlyTwoLayersAbs.pas b/examples/OnlyTwoLayers/OnlyTwoLayersAbs.pas new file mode 100644 index 00000000..26957981 --- /dev/null +++ b/examples/OnlyTwoLayers/OnlyTwoLayersAbs.pas @@ -0,0 +1,92 @@ +program OnlyTwoLayersAbs; +(* +OnlyTwoLayersAbs: this free pascal source code trains a neural network +that contains only one neuron to learn the function f(x,y) = Abs(x + y). +Copyright (C) 2023 Joao Paulo Schwarz Schuler + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; either version 2 of the License, or +any later version. 
+ +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along +with this program; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +*) + + +{$mode objfpc}{$H+} + +uses {$IFDEF UNIX} {$IFDEF UseCThreads} + cthreads, {$ENDIF} {$ENDIF} + Classes, + neuralnetwork, + neuralvolume, + neuralfit; + +type + // Define the input and output types for training data + TBackInput = array[0..1] of TNeuralFloat; // Input data for Abs(x + y) + TBackOutput = array[0..0] of TNeuralFloat; // Expected output for Abs(x + y) + +procedure RunAlgo(); +var + NN: TNNet; + EpochCnt: integer; + pOutPut: TNNetVolume; + vInputs: TBackInput; + vOutput: TBackOutput; +begin + NN := TNNet.Create(); + + // Create the neural network layers + NN.AddLayer(TNNetInput.Create(2)); // Input layer with 2 neurons + NN.AddLayer(TNNetFullConnectReLU.Create(2)); // Single neuron layer connected to both inputs from the previous layer. + NN.AddLayer(TNNetFullConnectLinear.Create(1)); // Single neuron layer connected to both inputs from the previous layer. + + NN.SetLearningRate(0.00001, 0); // Set the learning rate and momentum + + pOutPut := TNNetVolume.Create(1, 1, 1, 1); // Create a volume to hold the output + + WriteLn; + + for EpochCnt := 1 to 1000000 do + begin + vInputs[0] := (Random(10000) - 5000)/100; // Random number in the interval [-50,+50]. + vInputs[1] := (Random(10000) - 5000)/100; // Random number in the interval [-50,+50]. 
+ vOutput[0] := Abs(vInputs[0] + vInputs[1]); // Abs(X+Y) + // Feed forward and backpropagation + NN.Compute(vInputs); // Perform feedforward computation + NN.GetOutput(pOutPut); // Get the output of the network + NN.Backpropagate(vOutput); // Perform backpropagation to adjust weights + + if EpochCnt mod 5000 = 0 then + WriteLn( + EpochCnt:7, 'x', + ' Output:', pOutPut.Raw[0]:5:2,' ', + ' - Training/Desired Output:', vOutput[0]:5:2,' ' + ); + end; + + NN.DebugWeights(); // Display the final weights of the network + + pOutPut.Free; // Free the memory allocated for output + NN.Free; // Free the memory allocated for the network + + Write('Press ENTER to exit.'); + ReadLn; +end; + +var + // Stops Lazarus errors + Application: record Title:string; end; + +begin + Application.Title:='Only Two Layers - Abs(x+y)'; + RunAlgo(); +end. diff --git a/examples/SelfTest/SelfTest.dpr b/examples/SelfTest/SelfTest.dpr new file mode 100644 index 00000000..1938c065 --- /dev/null +++ b/examples/SelfTest/SelfTest.dpr @@ -0,0 +1,40 @@ +program SelfTest; + +{$APPTYPE CONSOLE} + +uses + Classes, + SysUtils, + Math, + CPUFeatures in '..\..\neural\CPUFeatures.pas', + Neural.AVX in '..\..\neural\Neural.AVX.pas', + Neural.AVXx64 in '..\..\neural\Neural.AVXx64.pas', + neuralab in '..\..\neural\neuralab.pas', + neuralabfun in '..\..\neural\neuralabfun.pas', + neuralbit in '..\..\neural\neuralbit.pas', + neuralbyteprediction in '..\..\neural\neuralbyteprediction.pas', + neuralcache in '..\..\neural\neuralcache.pas', + neuraldatasets in '..\..\neural\neuraldatasets.pas', + neuraldatasetsv in '..\..\neural\neuraldatasetsv.pas', + neuralevolutionary in '..\..\neural\neuralevolutionary.pas', + neuralfit in '..\..\neural\neuralfit.pas', + neuralgeneric in '..\..\neural\neuralgeneric.pas', + neuralnetwork in '..\..\neural\neuralnetwork.pas', + neuralopencl in '..\..\neural\neuralopencl.pas', + neuralopenclv in '..\..\neural\neuralopenclv.pas', + neuralplanbuilder in 
'..\..\neural\neuralplanbuilder.pas', + neuralthread in '..\..\neural\neuralthread.pas', + neuralvolume in '..\..\neural\neuralvolume.pas', + neuralvolumev in '..\..\neural\neuralvolumev.pas'; + +begin + WriteLn('Testing Volumes API ...'); + TestTNNetVolume(); + TestKMeans(); + + WriteLn('Testing Convolutional API ...'); + TestConvolutionAPI; + + WriteLn('Press ENTER to quit.'); + ReadLn; +end. diff --git a/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.pas b/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.pas index 99455261..41523b4c 100644 --- a/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.pas +++ b/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.pas @@ -30,8 +30,8 @@ TTestCNNAlgo = class(TCustomApplication) WriteLn('Creating Neural Network...'); NN := TNNet.Create(); NN.AddLayer([ - TNNetInput.Create(128, 128, 3), - TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}5, {Padding=}4, {Stride=}2), + TNNetInput.Create(64, 64, 3), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}5, {Padding=}4, {Stride=}1), TNNetMaxPool.Create(2), TNNetMovingStdNormalization.Create(), TNNetConvolutionReLU.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1), @@ -46,7 +46,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetSoftMax.Create() ]); NN.DebugStructure(); - // change ProportionToLoad to a smaller number if you don't have available 32GB of RAM. + // change ProportionToLoad to a smaller number if you don't have available 16GB of RAM. 
ProportionToLoad := 1; WriteLn('Loading ', Round(ProportionToLoad*100), '% of the Plant leave disease dataset into memory.'); CreateVolumesFromImagesFromFolder @@ -57,7 +57,7 @@ TTestCNNAlgo = class(TCustomApplication) {TrainingProp=}0.9*ProportionToLoad, {ValidationProp=}0.05*ProportionToLoad, {TestProp=}0.05*ProportionToLoad, - {NewSizeX=}128, {NewSizeY=}128 + {NewSizeX=}64, {NewSizeY=}64 ); WriteLn diff --git a/examples/VisualAutoencoder/uvisualautoencodertinyimagenet.pas b/examples/VisualAutoencoder/uvisualautoencodertinyimagenet.pas index da9c51cc..0d0b5d0e 100644 --- a/examples/VisualAutoencoder/uvisualautoencodertinyimagenet.pas +++ b/examples/VisualAutoencoder/uvisualautoencodertinyimagenet.pas @@ -106,8 +106,6 @@ implementation procedure TFormVisualLearning.ButLearnClick(Sender: TObject); begin - if not CheckCIFARFile() then exit; - if (FRunning) then begin SendStop; @@ -166,47 +164,14 @@ procedure TFormVisualLearning.FormDestroy(Sender: TObject); end; procedure TFormVisualLearning.DisplayInputImage(ImgInput: TNNetVolume; color_encoding: integer); -var - pMin0, pMax0: TNeuralFloat; - pMin1, pMax1: TNeuralFloat; - pMin2, pMax2: TNeuralFloat; begin FDisplay.Resize(ImgInput); FDisplay.Copy(ImgInput); - - if color_encoding = csEncodeLAB then - begin - FDisplay.GetMinMaxAtDepth(0, pMin0, pMax0); - FDisplay.GetMinMaxAtDepth(1, pMin1, pMax1); - FDisplay.GetMinMaxAtDepth(2, pMin2, pMax2); - pMax0 := Max(Abs(pMin0), Abs(pMax0)); - pMax1 := Max(Abs(pMin1), Abs(pMax1)); - pMax2 := Max(Abs(pMin2), Abs(pMax2)); - - if pMax0 > 2 then - begin - FDisplay.MulAtDepth(0, 2/pMax0); - end; - - if pMax1 > 2 then - begin - FDisplay.MulAtDepth(1, 2/pMax1); - end; - - if pMax2 > 2 then - begin - FDisplay.MulAtDepth(2, 2/pMax2); - end; - end - else if FDisplay.GetMaxAbs() > 2 then - begin - FDisplay.NormalizeMax(2); - end; + FDisplay.ForceMaxRange(2); //Debug only: FDisplay.PrintDebugChannel(); FDisplay.NeuronalInputToRgbImg(color_encoding); - LoadVolumeIntoTImage(FDisplay, 
aImage[FImageCnt]); aImage[FImageCnt].Width := 128; aImage[FImageCnt].Height := 128; @@ -246,7 +211,7 @@ procedure TFormVisualLearning.Learn( Sender: TObject); if ChkBigNetwork.Checked then NeuronMultiplier := 2 else NeuronMultiplier := 1; - FBaseName := 'IMAGEART'+IntToStr(NeuronMultiplier)+'-'; + FBaseName := 'IMAGEART-v1.1-'+IntToStr(NeuronMultiplier)+'-'; if RadRGB.Checked then begin FColorEncoding := csEncodeRGB; @@ -280,30 +245,29 @@ procedure TFormVisualLearning.Learn( Sender: TObject); WriteLn('Creating auto encoder.'); FAutoencoder.AddLayer([ TNNetInput.Create(64, 64, 3), - TNNetConvolution.Create(32 * NeuronMultiplier,3,1,2,1), //32x32 - TNNetConvolution.Create(32 * NeuronMultiplier,3,1,1,1), - TNNetConvolution.Create(32 * NeuronMultiplier,3,1,2,1), //16x16 - TNNetConvolution.Create(32 * NeuronMultiplier,3,1,1,1), - TNNetConvolution.Create(64 * NeuronMultiplier,3,1,2,0), //8x8 - TNNetConvolution.Create(64 * NeuronMultiplier,3,1,1,0), - - TNNetConvolution.Create(128 * NeuronMultiplier,3,1,2,1), //4x4 - TNNetConvolution.Create(128 * NeuronMultiplier,3,1,1,1), - TNNetDeMaxPool.Create(2), - TNNetConvolution.Create(128 * NeuronMultiplier,5,2,1,1), //8x8 - TNNetConvolution.Create(128 * NeuronMultiplier,3,1,1,1), - - TNNetDeMaxPool.Create(2), - TNNetConvolution.Create(32 * NeuronMultiplier,5,2,1,1), //16x16 - TNNetConvolution.Create(32 * NeuronMultiplier,3,1,1,1), - TNNetDeMaxPool.Create(2), - TNNetConvolution.Create(32 * NeuronMultiplier,5,2,1,1), //32x32 - TNNetConvolution.Create(32 * NeuronMultiplier,3,1,1,1), - TNNetDeMaxPool.Create(2), - TNNetConvolution.Create(32 * NeuronMultiplier,5,2,1,1), //64x64 - TNNetConvolution.Create(32 * NeuronMultiplier,3,1,1,1), - TNNetConvolutionLinear.Create(3,1,0,1,0), - TNNetReLUL.Create(-40, +40) // Protection against overflow + TNNetConvolution.Create({Features=}32 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}2,{SuppressBias=}1), //32x32 + TNNetConvolution.Create({Features=}32 * 
NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}1,{SuppressBias=}1), + TNNetConvolution.Create({Features=}32 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}2,{SuppressBias=}1), //16x16 + TNNetConvolution.Create({Features=}32 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}1,{SuppressBias=}1), + TNNetConvolution.Create({Features=}64 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}2,{SuppressBias=}1), //8x8 + TNNetConvolution.Create({Features=}64 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}1,{SuppressBias=}1), + TNNetConvolution.Create({Features=}128 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}2,{SuppressBias=}1), //4x4 + TNNetConvolution.Create({Features=}128 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}1,{SuppressBias=}1), + + TNNetUpsample.Create(), //8x8 + TNNetConvolution.Create({Features=}128 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}1,{SuppressBias=}1), + TNNetConvolution.Create({Features=}128 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}1,{SuppressBias=}1), + TNNetUpsample.Create(), //16x16 + TNNetConvolution.Create({Features=}32 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}1,{SuppressBias=}1), + TNNetConvolution.Create({Features=}128 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}1,{SuppressBias=}1), + TNNetUpsample.Create(), //32x32 + TNNetConvolution.Create({Features=}32 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}1,{SuppressBias=}1), + TNNetConvolution.Create({Features=}128 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}1,{SuppressBias=}1), + TNNetUpsample.Create(), //64x64 + TNNetConvolution.Create({Features=}32 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}1,{SuppressBias=}1), + TNNetConvolution.Create({Features=}32 * NeuronMultiplier,{FeatureSize=}3,{Padding=}1,{Stride=}1,{SuppressBias=}1), + TNNetConvolutionLinear.Create({Features=}3,{FeatureSize=}1,{Padding=}0,{Stride=}1,{SuppressBias=}0), + 
TNNetReLUL.Create(-40, +40, 0) // Protection against overflow ]); end else @@ -312,8 +276,6 @@ procedure TFormVisualLearning.Learn( Sender: TObject); FAutoencoder.LoadFromFile(FBaseName+'autoencoder.nn'); end; FAutoencoder.DebugStructure(); - FAutoencoder.SetLearningRate(0.001,0.9); - FAutoencoder.SetL2Decay(0.0); FFit.OnAfterEpoch := @Self.AutoencoderOnAfterEpoch; FFit.OnAfterStep := @Self.AutoencoderOnAfterStep; @@ -322,6 +284,7 @@ procedure TFormVisualLearning.Learn( Sender: TObject); FFit.L2Decay := 0.0; FFit.AvgWeightEpochCount := 1; FFit.InitialLearningRate := 0.0001; + FFit.ClipDelta := 0.01; FFit.FileNameBase := FBaseName+'autoencoder'; FFit.EnableBipolar99HitComparison(); {$ifdef OpenCL} @@ -331,9 +294,8 @@ procedure TFormVisualLearning.Learn( Sender: TObject); FAutoencoder.EnableOpenCL(FEasyOpenCL.PlatformIds[0], FEasyOpenCL.Devices[0]); end; {$endif} - //Debug only: - FFit.MaxThreadNum := 2; - FFit.FitLoading(FAutoencoder, {EpochSize=}FTrainImages.CountElements(), 500, 500, {Batch=}64, {Epochs=}35000, @GetTrainingData, nil, nil); // This line does the same as above + //Debug only: FFit.MaxThreadNum := 1; + FFit.FitLoading(FAutoencoder, {EpochSize=}FTrainImages.CountElements(), 0, 0, {Batch=}64, {Epochs=}35000, @GetTrainingData, nil, nil); // This line does the same as above FAutoencoder.Free; end; @@ -352,6 +314,7 @@ procedure TFormVisualLearning.GetTrainingData(Idx: integer; procedure TFormVisualLearning.AutoencoderOnAfterEpoch(Sender: TObject); begin + WriteLn('Finished epoch number: ', FFit.CurrentEpoch); end; procedure TFormVisualLearning.AutoencoderOnAfterStep(Sender: TObject); @@ -360,13 +323,13 @@ procedure TFormVisualLearning.AutoencoderOnAfterStep(Sender: TObject); begin LabClassRate.Caption := PadLeft(IntToStr(Round(FFit.TrainingAccuracy*100))+'%',4); ProcessMessages(); - //if FFit.CurrentStep mod 10 = 0 then + if FFit.CurrentStep mod FFit.ThreadNN.Count = 0 then begin ClassId := FTrainImages.GetRandomClassId(); ImageId := 
FTrainImages.List[ClassId].GetRandomIndex(); FFit.NN.Compute(FTrainImages.List[ClassId].List[ImageId]); DisplayInputImage(FFit.NN.GetLastLayer().Output, 0); - FFit.NN.GetLastLayer().Output.PrintDebug(); + //Debug only: FFit.NN.GetLastLayer().Output.PrintDebug(); end; end; diff --git a/examples/VisualGAN/uvisualgan.pas b/examples/VisualGAN/uvisualgan.pas index 498eee48..399029ab 100644 --- a/examples/VisualGAN/uvisualgan.pas +++ b/examples/VisualGAN/uvisualgan.pas @@ -322,7 +322,7 @@ procedure TFormVisualLearning.Learn( Sender: TObject); TNNetConvolutionReLU.Create(32 * NeuronMultiplier,3,1,1,0), TNNetMovingStdNormalization.Create(), TNNetConvolutionLinear.Create(3,3,1,1,0), - TNNetReLUL.Create(-40, +40) // Protection against overflow + TNNetReLUL.Create(-40, +40, 0) // Protection against overflow ]); FGenerative.Layers[FGenerative.GetFirstImageNeuronalLayerIdx()].InitBasicPatterns(); end diff --git a/neural/neuraldatasets.pas b/neural/neuraldatasets.pas index ee561449..45b04ccf 100644 --- a/neural/neuraldatasets.pas +++ b/neural/neuraldatasets.pas @@ -27,7 +27,7 @@ interface uses - {$IFNDEF FPC} {$IF (CompilerVersion <= 21)} Classes, {$ELSE} System.Classes, {$IFEND} {$ENDIF} + {$IFNDEF FPC} Windows, {$IF (CompilerVersion <= 21)} Classes, Graphics, {$ELSE} System.Classes, Vcl.Graphics, {$IFEND} {$ENDIF} neuralvolume, neuralnetwork {$IFDEF FPC}, FPimage, FPReadBMP, FPReadPCX, FPReadJPEG, FPReadPNG, @@ -115,7 +115,6 @@ interface 'cat' // used to be truck ); -{$IFDEF FPC} type { TFileNameList } @@ -129,7 +128,7 @@ TFileNameList = class(TStringListInt) procedure GetImageVolumePairFromId(ImageId: integer; vInput, vOutput: TNNetVolume; ThreadDangerous: boolean = True); procedure GetRandomImagePair(vInput, vOutput: TNNetVolume); - function ThreadSafeLoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume):boolean; + procedure ThreadSafeLoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume); property ClassCount: integer read FClassCount write FClassCount; 
end; @@ -141,6 +140,7 @@ TClassesAndElements = class(TStringStringListVolume) FBaseFolder: string; FNewSizeX, FNewSizeY: integer; FColorEncoding: integer; + {$IFDEF HASTHREADS}FCritSecLoad: TRTLCriticalSection;{$ENDIF} public constructor Create(); destructor Destroy(); override; @@ -180,13 +180,21 @@ TClassesAndElements = class(TStringStringListVolume) FolderName, pImageSubFolder: string; TrainingProp, ValidationProp, TestProp: single); + {$IFDEF FPC} procedure LoadImageIntoVolume(M: TFPMemoryImage; Vol:TNNetVolume); procedure LoadVolumeIntoImage(Vol:TNNetVolume; M: TFPMemoryImage); + function SaveImageFromVolumeIntoFile(V:TNNetVolume; ImageFileName:string):boolean; + {$ENDIF} // Loads an image from a file and stores it into a Volume. - function LoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume):boolean; - function SaveImageFromVolumeIntoFile(V:TNNetVolume; ImageFileName:string):boolean; -{$ENDIF} + function LoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume):boolean; overload; + + // Loads an image from a file and stores it into a Volume, resizing to + // SizeX, SizeY and optionally encoding as neuronal input if it has a + // color encoding such as csEncodeRGB. 
+ function LoadImageFromFileIntoVolume( + ImageFileName:string; V:TNNetVolume; SizeX, SizeY: integer; + EncodeNeuronalInput: integer = -1):boolean; overload; // Writes the header of a confusion matrix into a CSV file procedure ConfusionWriteCSVHeader(var CSVConfusion: TextFile; Labels: array of string); @@ -271,15 +279,88 @@ procedure TranslateCifar10VolumesToMachineAnimal(VolumeList: TNNetVolumeList); {$IFNDEF FPC} function SwapEndian(I:integer):integer; +procedure FindAllDirectories(AList: TStrings; const SearchPath: String; + SearchSubDirs: Boolean = true; PathSeparator: char = ';'); overload; +function DirectorySeparator: string; {$ENDIF} implementation uses SysUtils, math, neuralthread, - {$IFDEF FPC}fileutil{$ELSE} Windows{$ENDIF}; + {$IFDEF FPC}fileutil{$ELSE} {$IF CompilerVersion >= 23} System.IOUtils, {$IFEND} Types{$ENDIF}; + +{$IFNDEF FPC} +function SwapEndian(I:integer):integer; +begin + // valid for SmallInt + // result := Swap(I) + Result := ((Swap(Smallint(I)) and $ffff) shl $10) or (Swap(Smallint(I shr $10)) and $ffff) +end; + +procedure FindAllDirectories(AList: TStrings; const SearchPath: String; + SearchSubDirs: Boolean = true; PathSeparator: char = ';'); +{$IF CompilerVersion >= 23} + var + dirs: TStringDynArray; + dir, Path, SearchPattern: String; + SearchOption: TSearchOption; +begin + if SearchSubDirs + then SearchOption := TSearchOption.soAllDirectories + else SearchOption := TSearchOption.soTopDirectoryOnly; + Path := SearchPath; + SearchPattern := '*'; + dirs := TDirectory.GetDirectories(Path, SearchPattern, SearchOption);//, SearchSubDirs); + for dir in dirs do + begin + AList.Add(dir); + end; +end; +{$ELSE} +begin + // Not supported in Delphi 2010 +end; +{$IFEND} + +procedure FindAllFiles(AList: TStrings; const SearchPath: String; + const SearchMask: String = ''; SearchSubDirs: Boolean = True; DirAttr: Word = faDirectory; + MaskSeparator: char = ';'; PathSeparator: char = ';'); +{$IF CompilerVersion >= 23} +var + fileNames: 
TStringDynArray; + fileName, Path, SearchPattern: String; + SearchOption: TSearchOption; +begin + if SearchSubDirs + then SearchOption := TSearchOption.soAllDirectories + else SearchOption := TSearchOption.soTopDirectoryOnly; + Path := SearchPath; + SearchPattern := '*'; + fileNames := TDirectory.GetFiles(Path, SearchPattern, SearchOption);//, SearchSubDirs); + for fileName in fileNames do + begin + AList.Add(fileName); + end; +end; +{$ELSE} +begin + // not supported in delphi 2010 +end; +{$IFEND} + + +function DirectorySeparator: string; +begin + {$IF CompilerVersion >= 23} + Result := TPath.DirectorySeparatorChar; + {$ELSE} + Result := PathDelim; + {$IFEND} +end; + +{$ENDIF} -{$IFDEF FPC} procedure CreateVolumesFromImagesFromFolder(out ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes: TNNetVolumeList; FolderName, pImageSubFolder: string; @@ -368,10 +449,16 @@ constructor TClassesAndElements.Create(); inherited Create(); FImageSubFolder := ''; FBaseFolder := ''; + {$IFDEF HASTHREADS} + NeuralInitCriticalSection(FCritSecLoad); + {$ENDIF} end; destructor TClassesAndElements.Destroy(); begin + {$IFDEF HASTHREADS} + NeuralDoneCriticalSection(FCritSecLoad); + {$ENDIF} inherited Destroy(); end; @@ -384,7 +471,7 @@ function TClassesAndElements.CountElements(): integer; begin for ClassId := 0 to Count - 1 do begin - Result += Self.List[ClassId].Count; + Result := Result + Self.List[ClassId].Count; end; end; end; @@ -409,7 +496,7 @@ procedure TClassesAndElements.LoadFoldersAsClasses(FolderName: string; pImageSub ClassFolder := Self[ClassCnt] + DirectorySeparator; if FImageSubFolder <> '' then begin - ClassFolder += FImageSubFolder + DirectorySeparator; + ClassFolder := ClassFolder + FImageSubFolder + DirectorySeparator; end; if not Assigned(Self.List[ClassCnt]) then begin @@ -447,13 +534,13 @@ procedure TClassesAndElements.LoadFoldersAsClassesProportional( ClassFolder := Self[ClassCnt] + DirectorySeparator; if FImageSubFolder <> '' then begin - ClassFolder += 
FImageSubFolder + DirectorySeparator; + ClassFolder := ClassFolder + FImageSubFolder + DirectorySeparator; end; if not Assigned(Self.List[ClassCnt]) then begin WriteLn(ClassFolder,' - error: not assigned list'); end; - FindAllFiles(Self.List[ClassCnt], ClassFolder, '*.png;*.jpg;*.jpeg;*.bmp', {SearchSubDirs} false); + FindAllFiles(Self.List[ClassCnt], ClassFolder, '*.png;*.jpg;*.jpeg;*.bmp;*.tif', {SearchSubDirs} false); Self.List[ClassCnt].FixObjects(); ElementCnt := Self.List[ClassCnt].Count; SkipFirst := Round(ElementCnt * fSkipFirst); @@ -574,7 +661,6 @@ procedure TClassesAndElements.AddVolumesTo(Volumes: TNNetVolumeList; EmptySource procedure TClassesAndElements.AddFileNamesTo(FileNames: TFileNameList); var - SourceVolume: TNNetVolume; ClassId, FileId: integer; MaxClassId, MaxFileId: integer; begin @@ -628,6 +714,7 @@ function TClassesAndElements.FileCountAtClassId(ClassId: integer): integer; result := Self.List[ClassId].Count; end; +{$IFDEF FPC} function TFileNameList.ThreadSafeLoadImageFromFileIntoVolume( ImageFileName: string; V: TNNetVolume): boolean; var @@ -662,6 +749,114 @@ function SaveImageFromVolumeIntoFile(V: TNNetVolume; ImageFileName: string M.Free; end; +procedure LoadImageIntoVolume(M: TFPMemoryImage; Vol:TNNetVolume); +var + CountX, CountY, MaxX, MaxY: integer; + LocalColor: TFPColor; + RawPos: integer; +begin + MaxX := M.Width - 1; + MaxY := M.Height - 1; + Vol.ReSize(MaxX + 1, MaxY + 1, 3); + + for CountX := 0 to MaxX do + begin + for CountY := 0 to MaxY do + begin + LocalColor := M.Colors[CountX, CountY]; + RawPos := Vol.GetRawPos(CountX, CountY, 0); + + Vol.FData[RawPos] := LocalColor.red shr 8; + Vol.FData[RawPos + 1] := LocalColor.green shr 8; + Vol.FData[RawPos + 2] := LocalColor.blue shr 8; + end; + end; +end; + +procedure LoadVolumeIntoImage(Vol: TNNetVolume; M: TFPMemoryImage); +var + CountX, CountY, MaxX, MaxY: integer; + LocalColor: TFPColor; + RawPos: integer; +begin + MaxX := Vol.SizeX - 1; + MaxY := Vol.SizeY - 1; + 
M.SetSize(Vol.SizeX, Vol.SizeY); + for CountX := 0 to MaxX do + begin + for CountY := 0 to MaxY do + begin + RawPos := Vol.GetRawPos(CountX, CountY, 0); + LocalColor.red := NeuronForceMinMax(Round(Vol.FData[RawPos]),0,255) shl 8; + LocalColor.green := NeuronForceMinMax(Round(Vol.FData[RawPos + 1]),0,255) shl 8; + LocalColor.blue := NeuronForceMinMax(Round(Vol.FData[RawPos + 2]),0, 255) shl 8; + M.Colors[CountX, CountY] := LocalColor; + end; + end; +end; +{$ELSE} +procedure LoadPictureIntoVolume(Picture: TPicture; Vol:TNNetVolume); +var + CountX, CountY, MaxX, MaxY: integer; + LocalColor: TColor; + RawPos: integer; +begin + MaxX := Picture.Width - 1; + MaxY := Picture.Height - 1; + Vol.ReSize(MaxX + 1, MaxY + 1, 3); + + for CountX := 0 to MaxX do + begin + for CountY := 0 to MaxY do + begin + LocalColor := Picture.Bitmap.Canvas.Pixels[CountX, CountY]; + RawPos := Vol.GetRawPos(CountX, CountY, 0); + + Vol.FData[RawPos] := LocalColor and 255; + Vol.FData[RawPos + 1] := (LocalColor shr 8) and 255; + Vol.FData[RawPos + 2] := (LocalColor shr 16) and 255; + end; + end; +end; + +procedure TFileNameList.ThreadSafeLoadImageFromFileIntoVolume( + ImageFileName: string; V: TNNetVolume); +var + LocalPicture: TPicture; +begin + LocalPicture := TPicture.Create; + {$IFDEF HASTHREADS}EnterCriticalSection(FCritSecLoad);{$ENDIF} + LocalPicture.LoadFromFile( ImageFileName ); + {$IFDEF HASTHREADS}LeaveCriticalSection(FCritSecLoad);{$ENDIF} + LoadPictureIntoVolume(LocalPicture, V); + LocalPicture.Free; +end; + +function LoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume):boolean; +var + LocalPicture: TPicture; +begin + LocalPicture := TPicture.Create; + LocalPicture.LoadFromFile( ImageFileName ); + LoadPictureIntoVolume(LocalPicture, V); + LocalPicture.Free; + Result := true; +end; + +(* +function SaveImageFromVolumeIntoFile(V: TNNetVolume; ImageFileName: string + ): boolean; +var + LocalPicture: TPicture; +begin + LocalPicture := TPicture.Create; + LoadVolumeIntoImage(V, 
M); + Result := M.SaveToFile(ImageFileName); + LocalPicture.Free; +end; +*) +{$ENDIF} + procedure TClassesAndElements.LoadImages_NTL(index, threadnum: integer); var SourceVolume: TNNetVolume; @@ -694,13 +889,21 @@ procedure TClassesAndElements.LoadImages_NTL(index, threadnum: integer); begin SourceVolume := Self.List[ClassId].List[ImageId]; // Debug: WriteLn('Loading: ', Self.GetFileName(ClassId, ImageId)); + try {$IFDEF FPC} - M.LoadFromFile( Self.GetFileName(ClassId, ImageId) ); - LoadImageIntoVolume(M, SourceVolume); + {$IFDEF HASTHREADS}EnterCriticalSection(FCritSecLoad);{$ENDIF} + M.LoadFromFile( Self.GetFileName(ClassId, ImageId) ); + {$IFDEF HASTHREADS}LeaveCriticalSection(FCritSecLoad);{$ENDIF} + LoadImageIntoVolume(M, SourceVolume); {$ELSE} - LocalPicture.LoadFromFile( Self.GetFileName(ClassId, ImageId) ); - LoadPictureIntoVolume(LocalPicture, SourceVolume); + LocalPicture.LoadFromFile( Self.GetFileName(ClassId, ImageId) ); + LoadPictureIntoVolume(LocalPicture, SourceVolume); {$ENDIF} + except + WriteLn('Failed loading image: ',Self.GetFileName(ClassId, ImageId)); + SourceVolume.ReSize(FNewSizeX, FNewSizeY, 3); + {$IFDEF HASTHREADS}LeaveCriticalSection(FCritSecLoad);{$ENDIF} + end; if (FNewSizeX > 0) and (FNewSizeY > 0) then begin if (SourceVolume.SizeX <> FNewSizeX) or (SourceVolume.SizeY <> FNewSizeY) then @@ -764,63 +967,6 @@ procedure CreateFileNameListsFromImagesFromFolder(out TrainingFileNames, ClassesAndElements.Free; end; -procedure LoadImageIntoVolume(M: TFPMemoryImage; Vol:TNNetVolume); -var - CountX, CountY, MaxX, MaxY: integer; - LocalColor: TFPColor; - RawPos: integer; -begin - MaxX := M.Width - 1; - MaxY := M.Height - 1; - Vol.ReSize(MaxX + 1, MaxY + 1, 3); - - for CountX := 0 to MaxX do - begin - for CountY := 0 to MaxY do - begin - LocalColor := M.Colors[CountX, CountY]; - RawPos := Vol.GetRawPos(CountX, CountY, 0); - - Vol.FData[RawPos] := LocalColor.red shr 8; - Vol.FData[RawPos + 1] := LocalColor.green shr 8; - Vol.FData[RawPos + 2] := 
LocalColor.blue shr 8; - end; - end; -end; - -procedure LoadVolumeIntoImage(Vol: TNNetVolume; M: TFPMemoryImage); -var - CountX, CountY, MaxX, MaxY: integer; - LocalColor: TFPColor; - RawPos: integer; -begin - MaxX := Vol.SizeX - 1; - MaxY := Vol.SizeY - 1; - M.SetSize(Vol.SizeX, Vol.SizeY); - for CountX := 0 to MaxX do - begin - for CountY := 0 to MaxY do - begin - RawPos := Vol.GetRawPos(CountX, CountY, 0); - LocalColor.red := NeuronForceMinMax(Round(Vol.FData[RawPos]),0,255) shl 8; - LocalColor.green := NeuronForceMinMax(Round(Vol.FData[RawPos + 1]),0,255) shl 8; - LocalColor.blue := NeuronForceMinMax(Round(Vol.FData[RawPos + 2]),0, 255) shl 8; - M.Colors[CountX, CountY] := LocalColor; - end; - end; -end; - -{$ENDIF} - -{$IFNDEF FPC} -function SwapEndian(I:integer):integer; -begin - // valid for SmallInt - // result := Swap(I) - Result := ((Swap(Smallint(I)) and $ffff) shl $10) or (Swap(Smallint(I shr $10)) and $ffff) -end; -{$ENDIF} - procedure TranslateCifar10VolumesToMachineAnimal(VolumeList: TNNetVolumeList); var i : integer; Volume: TNNetVolume; @@ -1186,6 +1332,34 @@ function CheckMNISTFile(fileName: string; IsFasion:boolean = false): boolean; end; end; +function LoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume; + SizeX, SizeY: integer; + EncodeNeuronalInput: integer = -1 + ): boolean; +var + VAux: TNNetVolume; +begin + if LoadImageFromFileIntoVolume(ImageFileName, V) then + begin + if (V.SizeX<>SizeX) or (V.SizeY<>SizeY) then + begin + VAux := TNNetVolume.Create; + VAux.Copy(V); + V.CopyResizing(VAux, SizeX, SizeY); + VAux.Free; + end; + if (EncodeNeuronalInput >= 0) then + begin + V.RgbImgToNeuronalInput( (EncodeNeuronalInput) and 255 ); + end; + Result := true; + end + else + begin + Result := false; + end; +end; + procedure ConfusionWriteCSVHeader(var CSVConfusion: TextFile; Labels: array of string); var I: integer; diff --git a/neural/neuraldatasetsv.pas b/neural/neuraldatasetsv.pas index 379a59e6..4f077638 100644 --- 
a/neural/neuraldatasetsv.pas +++ b/neural/neuraldatasetsv.pas @@ -25,9 +25,25 @@ interface uses - neuraldatasets, Classes, SysUtils, ExtCtrls, Graphics, - neuralvolume, neuralnetwork, StdCtrls - {$IFNDEF FPC}, Windows{$ENDIF} + neuraldatasets, Classes, SysUtils, + neuralvolume, neuralnetwork, + {$IFDEF FPC} + ExtCtrls, + StdCtrls, + Graphics + {$ENDIF} + {$IFNDEF FPC} + Windows, + {$IF CompilerVersion >= 23} + VCL.ExtCtrls, + VCL.StdCtrls, + VCL.Graphics + {$ELSE} + ExtCtrls, + StdCtrls, + Graphics + {$IFEND} + {$ENDIF} ; type diff --git a/neural/neuralfit.pas b/neural/neuralfit.pas index ae12acb0..628cfef4 100644 --- a/neural/neuralfit.pas +++ b/neural/neuralfit.pas @@ -156,6 +156,8 @@ TNeuralFitWithImageBase = class(TNeuralFitBase) constructor Create(); destructor Destroy(); override; procedure ClassifyImage(pNN: TNNet; pImgInput, pOutput: TNNetVolume); + procedure ClassifyImageFromFile(pNN: TNNet; pFilename: string; pOutput: TNNetVolume); overload; + function ClassifyImageFromFile(pNN: TNNet; pFilename: string):integer; overload; procedure EnableDefaultImageTreatment(); virtual; // ChannelShiftRate: 0 means no augmentation. 0.1 means 10% of maximum change per channel. @@ -733,12 +735,6 @@ procedure TNeuralDataLoadingFit.FitLoading(pNN: TNNet; TrainingCnt, end; end;// Assigned(pGetValidationPair) - if (ValidationCnt=0) then - begin - FMessageProc('Saving NN at '+fileName); - FAvgWeight.SaveToFile(fileName); - end; - if (FCurrentEpoch mod FThreadNN.Count = 0) and (FVerbose) then begin FThreadNN[0].DebugWeights(); @@ -757,9 +753,14 @@ procedure TNeuralDataLoadingFit.FitLoading(pNN: TNNet; TrainingCnt, break; end; end; - - if ( (FCurrentEpoch mod 10 = 0) and (FCurrentEpoch > 0) ) then - begin + end + else + begin + FMessageProc('Skipping Validation. 
Saving NN at '+fileName); + FAvgWeight.SaveToFile(fileName); + end; + if ( (FCurrentEpoch mod 10 = 0) and (FCurrentEpoch > 0) ) then + begin WriteLn ( CSVFile, @@ -805,7 +806,7 @@ procedure TNeuralDataLoadingFit.FitLoading(pNN: TNNet; TrainingCnt, MessageProc( 'Epochs: '+IntToStr(FCurrentEpoch)+ '. Working time: '+FloatToStrF(Round((Now() - globalStartTime)*2400)/100,ffFixed,4,2)+' hours.'); - end; + if Assigned(FOnAfterEpoch) then FOnAfterEpoch(Self); end; @@ -1370,8 +1371,7 @@ procedure TNeuralDataLoadingFit.RunTrainingBatch(); {$ENDIF} if FClipDelta > 0 then begin - MaxDelta := FNN.ForceMaxAbsoluteDelta(FClipDelta); - MessageProc('Deltas have maxed to: '+FloatToStr(MaxDelta)); + FNN.ForceMaxAbsoluteDelta(FClipDelta); end else begin @@ -1797,8 +1797,7 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; {$ENDIF} if FClipDelta > 0 then begin - MaxDelta := FNN.ForceMaxAbsoluteDelta(FClipDelta); - MessageProc('Deltas have maxed to: '+FloatToStr(MaxDelta)); + FNN.ForceMaxAbsoluteDelta(FClipDelta); end else begin @@ -2427,6 +2426,16 @@ procedure TNeuralFitWithImageBase.ClassifyImage(pNN: TNNet; pImgInput, pOutput: sumOutput.Add( pOutput ); end; + if FHasFlipY then + begin + ImgInput.FlipY(); + TotalDiv := TotalDiv + 1; + pNN.Compute( ImgInput ); + pNN.GetOutput( pOutput ); + sumOutput.Add( pOutput ); + ImgInput.FlipY(); + end; + if FMaxCropSize >= 2 then begin ImgInputCp.CopyCropping(ImgInput, FMaxCropSize div 2, FMaxCropSize div 2, ImgInput.SizeX - FMaxCropSize, ImgInput.SizeY - FMaxCropSize); @@ -2446,6 +2455,38 @@ procedure TNeuralFitWithImageBase.ClassifyImage(pNN: TNNet; pImgInput, pOutput: ImgInput.Free; end; +procedure TNeuralFitWithImageBase.ClassifyImageFromFile(pNN: TNNet; + pFilename: string; pOutput: TNNetVolume); +var + vInputImage: TNNetVolume; + InputSizeX, InputSizeY, NumberOfClasses: integer; +begin + vInputImage := TNNetVolume.Create(); + InputSizeX := pNN.Layers[0].Output.SizeX; + InputSizeY := pNN.Layers[0].Output.SizeY; + NumberOfClasses := 
pNN.GetLastLayer().Output.Size; + if pOutput.Size <> NumberOfClasses then pOutput.ReSize(pNN.GetLastLayer().Output); + pOutput.Fill(0); + if LoadImageFromFileIntoVolume( + pFilename, vInputImage, InputSizeX, InputSizeY, + {EncodeNeuronalInput=}csEncodeRGB) then + begin + ClassifyImage(pNN, vInputImage, pOutput); + end; + vInputImage.Free; +end; + +function TNeuralFitWithImageBase.ClassifyImageFromFile(pNN: TNNet; + pFilename: string): integer; +var + vOutput: TNNetVolume; +begin + vOutput := TNNetVolume.Create(); + ClassifyImageFromFile(pNN, pFilename, vOutput); + Result := vOutput.GetClass(); + vOutput.Free; +end; + procedure TNeuralFitWithImageBase.EnableDefaultImageTreatment(); begin FColorEncoding := 0; diff --git a/neural/neuralnetwork.pas b/neural/neuralnetwork.pas index 6a0d3d22..9b18de7c 100644 --- a/neural/neuralnetwork.pas +++ b/neural/neuralnetwork.pas @@ -138,6 +138,7 @@ TNNetNeuron = class (TMObject) procedure InitSELU(Value: TNeuralFloat = 1); property Weights: TNNetVolume read FWeights; + property Bias: TNeuralFloat read FBiasWeight; property BackInertia: TNNetVolume read FBackInertia; property Delta: TNNetVolume read FDelta; end; @@ -225,21 +226,35 @@ TNNetLayer = class(TMObject) procedure DisableOpenCL(); virtual; procedure EnableOpenCL(DotProductKernel: TDotProductKernel); virtual; {$ENDIF} + // Computes the forward pass of this layer. procedure Compute(); virtual; abstract; + // Computes the backward pass. + // You may find theoretical info at https://en.wikipedia.org/wiki/Backpropagation. 
procedure Backpropagate(); virtual; abstract; + procedure ComputeOutputErrorForOneNeuron(NeuronIdx: integer; value: TNeuralFloat); procedure ComputeOutputErrorWith(pOutput: TNNetVolume); virtual; procedure ComputeOutputErrorForIdx(pOutput: TNNetVolume; const aIdx: array of integer); virtual; procedure ComputeErrorDeriv(); {$IFDEF FPC}{$IFDEF Release} inline; {$ENDIF}{$ENDIF} procedure Fill(value: TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} procedure ClearDeltas(); {$IFDEF Release} inline; {$ENDIF} + // Adds neurons to the layer. procedure AddNeurons(NeuronNum: integer); + // Calculates the number of missing neurons so the layer can have + // NeuronNum neurons. The missing neurons are then added. procedure AddMissingNeurons(NeuronNum: integer); + // Defines the number of weights for all neurons in the layer. procedure SetNumWeightsForAllNeurons(NumWeights: integer); overload; + // Defines the number of weights for all neurons in the layer. procedure SetNumWeightsForAllNeurons(x, y, d: integer); overload; + // Defines the number of weights for all neurons in the layer copying + // the configuration found at the Origin parameters. procedure SetNumWeightsForAllNeurons(Origin: TNNetVolume); overload; + // Returns the maximum weight value from all neurons in the layer. function GetMaxWeight(): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} + // Returns the maximum absolute weight value from all neurons in the layer. function GetMaxAbsWeight(): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} + // Returns the minimum weight value from all neurons in the layer. 
function GetMinWeight(): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} function GetMaxDelta(): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} function GetMinDelta(): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} @@ -247,34 +262,52 @@ TNNetLayer = class(TMObject) function ForceMaxAbsoluteWeight(vMax: TNeuralFloat): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} function GetMaxAbsoluteDelta(): TNeuralFloat; virtual; procedure GetMinMaxAtDepth(pDepth: integer; var pMin, pMax: TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} + // Returns the sum of all weights from all neurons in the layer. function GetWeightSum(): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} + // Returns the sum of all biases from all neurons in the layer. function GetBiasSum(): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} function GetInertiaSum(): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} + // Returns the number of weights in the layer. function CountWeights(): integer; {$IFDEF Release} inline; {$ENDIF} + // Returns the number of neurons in the layer. function CountNeurons(): integer; {$IFDEF Release} inline; {$ENDIF} + // Multiplies all weights in the layer by value V. procedure MulWeights(V:TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} procedure MulDeltas(V:TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} + // Clear all biases from all neurons in the layer. + procedure ClearBias(); {$IFDEF Release} inline; {$ENDIF} procedure ClearInertia(); {$IFDEF Release} inline; {$ENDIF} procedure ClearTimes(); {$IFDEF Release} inline; {$ENDIF} procedure AddTimes(Origin: TNNetLayer); {$IFDEF Release} inline; {$ENDIF} procedure CopyTimes(Origin: TNNetLayer); {$IFDEF Release} inline; {$ENDIF} procedure MulMulAddWeights(Value1, Value2: TNeuralFloat; Origin: TNNetLayer); {$IFDEF Release} inline; {$ENDIF} + // Sums all weights by their corresponding weights found at Origin. + // Both layers must have the same number of weights and neurons for this + // function to work as expected. 
procedure SumWeights(Origin: TNNetLayer); {$IFDEF Release} inline; {$ENDIF} procedure SumDeltas(Origin: TNNetLayer); {$IFDEF Release} inline; {$ENDIF} procedure SumDeltasNoChecks(Origin: TNNetLayer); {$IFDEF Release} inline; {$ENDIF} + // Copies all weights by their corresponding weights found at Origin. + // Both layers must have the same number of weights and neurons for this + // function to work as expected. procedure CopyWeights(Origin: TNNetLayer); virtual; procedure ForceRangeWeights(V:TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} procedure ForcePositiveWeights(); {$IFDEF Release} inline; {$ENDIF} procedure NormalizeWeights(VMax: TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} function SaveDataToString(): string; virtual; - procedure LoadDataFromString(strData: string); + procedure LoadDataFromString(strData: string); virtual; + // Saves the layer structure to a string so the layer can be later + // restored/reconstructed. function SaveStructureToString(): string; virtual; procedure SetBatchUpdate(pBatchUpdate: boolean); {$IFDEF Release} inline; {$ENDIF} procedure UpdateWeights(); {$IFDEF Release} inline; {$ENDIF} function InitBasicPatterns(): TNNetLayer; - // Backprop call cnt + // Increments an internal counter that counts how many branches load + // the output of the current layer. procedure IncDepartingBranchesCnt(); {$IFDEF Release} inline; {$ENDIF} + // Decrements an internal counter that counts how many branches load + // the output of the current layer. 
procedure ResetBackpropCallCurrCnt(); {$IFDEF Release} inline; {$ENDIF} // Initializers @@ -494,6 +527,13 @@ TNNetSwish = class(TNNetReLUBase) procedure Compute(); override; end; + /// Hard Swish Activation function + // https://paperswithcode.com/method/hard-swish + TNNetHardSwish = class(TNNetReLUBase) + public + procedure Compute(); override; + end; + /// Swish activation function with maximum limit of 6 TNNetSwish6 = class(TNNetReLUBase) public @@ -982,6 +1022,7 @@ TNNetConvolutionAbstract = class(TNNetLayerConcatedWeights) FFeatureSizeX, FFeatureSizeY: integer; FFeatureSizeYMinus1, FFeatureSizeXMinus1: integer; FInputCopy: TNNetVolume; + FPrevLayerErrorPadded: TNNetVolume; FSizeXDepth: integer; FSizeXDepthBytes: integer; FPrevSizeXDepthBytes: integer; @@ -989,6 +1030,7 @@ TNNetConvolutionAbstract = class(TNNetLayerConcatedWeights) function CalcOutputSize(pInputSize, pFeatureSize, pInputPadding, pStride: integer) : integer; procedure RefreshCalculatePrevLayerError(); procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + procedure RefreshPrevSizeXDepthBytes(); public constructor Create(pFeatureSize, pInputPadding, pStride: integer; pSuppressBias: integer = 0); reintroduce; overload; destructor Destroy(); override; @@ -1100,6 +1142,12 @@ TNNetGroupedPointwiseConvReLU = class(TNNetGroupedPointwiseConvLinear) constructor Create(pNumFeatures, pGroups: integer; pSuppressBias: integer = 0); override; end; + /// Grouped pointwise convolution with HardSwish activation. + TNNetGroupedPointwiseConvHardSwish = class(TNNetGroupedPointwiseConvLinear) + public + constructor Create(pNumFeatures, pGroups: integer; pSuppressBias: integer = 0); override; + end; + /// Convolutional layer with hyperbolic tangent activation function. 
TNNetConvolution = class(TNNetConvolutionBase) protected @@ -1168,6 +1216,12 @@ TNNetConvolutionSwish = class(TNNetConvolution) constructor Create(pNumFeatures, pFeatureSize, pInputPadding, pStride: integer; pSuppressBias: integer = 0); override; end; + /// Convolutional layer with Hard Swish activation function. + TNNetConvolutionHardSwish = class(TNNetConvolution) + public + constructor Create(pNumFeatures, pFeatureSize, pInputPadding, pStride: integer; pSuppressBias: integer = 0); override; + end; + /// Pointwise convolution with tanh activation. TNNetPointwiseConv = class(TNNetConvolution) public @@ -1366,15 +1420,19 @@ TNNet = class(TMObject) constructor Create(); destructor Destroy(); override; + // Creates a layer from an input string. function CreateLayer(strData: string): TNNetLayer; + // Adds a layer or layers to this neural network. function AddLayer(pLayer: TNNetLayer): TNNetLayer; overload; function AddLayer(strData: string): TNNetLayer; overload; + // Adds a layer or layers after another layer (for non-sequential models) function AddLayer(pLayers: array of TNNetLayer): TNNetLayer; overload; function AddLayerAfter(pLayer, pAfterLayer: TNNetLayer): TNNetLayer; overload; function AddLayerAfter(pLayer: TNNetLayer; pAfterLayerIdx: integer): TNNetLayer; overload; function AddLayerAfter(strData: string; pAfterLayerIdx: integer): TNNetLayer; overload; function AddLayerAfter(pLayers: array of TNNetLayer; pLayer: TNNetLayer): TNNetLayer; overload; function AddLayerAfter(pLayers: array of TNNetLayer; pAfterLayerIdx: integer): TNNetLayer; overload; + // Adds a layer and concats the input into the output. 
function AddLayerConcatingInputOutput(pLayers: array of TNNetLayer): TNNetLayer; overload; function AddLayerConcatingInputOutput(pLayer: TNNetLayer): TNNetLayer; overload; function AddLayerDeepConcatingInputOutput(pLayers: array of TNNetLayer): TNNetLayer; overload; @@ -1447,23 +1505,51 @@ TNNet = class(TMObject) function AddAvgMaxChannel(pMaxPoolDropout: TNeuralFloat = 0; pKeepDepth:boolean = false; pAfterLayer: TNNetLayer = nil): TNNetLayer; procedure AddToExponentialWeightAverage(NewElement: TNNet; Decay: TNeuralFloat); procedure AddToWeightAverage(NewElement: TNNet; CurrentElementCount: integer); + // Returns the layer index of the first neuronal layer (layers that have neurons). function GetFirstNeuronalLayerIdx(FromLayerIdx:integer = 0): integer; {$IFDEF Release} inline; {$ENDIF} + // Returns the layer index of the first neuronal layer that can process an image as input. function GetFirstImageNeuronalLayerIdx(FromLayerIdx:integer = 0): integer; {$IFDEF Release} inline; {$ENDIF} function GetFirstNeuronalLayerIdxWithChannels(FromLayerIdx, Channels:integer): integer; {$IFDEF Release} inline; {$ENDIF} + // Returns the index of the last layer. function GetLastLayerIdx(): integer; {$IFDEF Release} inline; {$ENDIF} + // Returns the last layer. function GetLastLayer(): TNNetLayer; + // Returns a layer of random index. function GetRandomLayer(): TNNetLayer; + // Computes the forward pass with pInput. The output is returned + // at pOutput. You can optionally compute from an intermediate layer defined + // at FromLayerIdx. This method should be used when you have either multiple + // inputs or multiple outputs. procedure Compute(pInput, pOutput: TNNetVolumeList; FromLayerIdx:integer = 0); overload; + // Computes the forward pass with pInput. The output is returned + // at pOutput. You can optionally compute from an intermediate layer defined + // at FromLayerIdx. 
procedure Compute(pInput, pOutput: TNNetVolume; FromLayerIdx:integer = 0); overload; + // Computes the forward pass with pInput. + // You can optionally compute from an intermediate layer defined + // at FromLayerIdx. procedure Compute(pInput: TNNetVolume; FromLayerIdx:integer = 0); overload; + // Computes the forward pass with pInput. procedure Compute(pInput: array of TNNetVolume); overload; + // Computes the forward pass with pInput. procedure Compute(pInput: array of TNeuralFloatDynArr); overload; + // Computes the forward pass with pInput. procedure Compute(pInput: array of TNeuralFloat; FromLayerIdx:integer = 0); overload; + // Computes the backward pass. + // You may find theoretical info at https://en.wikipedia.org/wiki/Backpropagation. + // This method will train the neural network to find the desired pOutput + // for the previously called "compute" forward pass method. procedure Backpropagate(pOutput: TNNetVolume); overload; + // Computes the backward pass. + // You may find theoretical info at https://en.wikipedia.org/wiki/Backpropagation. + // This method will train the neural network to find the desired pOutput + // for the previously called "compute" forward pass method. + procedure Backpropagate(pOutput: array of TNeuralFloat); overload; procedure BackpropagateForIdx(pOutput: TNNetVolume; const aIdx: array of integer); procedure BackpropagateFromLayerAndNeuron(LayerIdx, NeuronIdx: integer; Error: TNeuralFloat); - procedure Backpropagate(pOutput: array of TNeuralFloat); overload; + // Returns the output. procedure GetOutput(pOutput: TNNetVolume); + // Sums the output of this NN into pOutput. 
procedure AddOutput(pOutput: TNNetVolume); {$IFDEF Release} inline; {$ENDIF} procedure SetActivationFn(ActFn, ActFnDeriv: TNeuralActivationFunction); procedure SetLearningRate(pLearningRate, pInertia: TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} @@ -1494,6 +1580,7 @@ TNNet = class(TMObject) function GetMaxAbsoluteDelta(): TNeuralFloat; function NormalizeMaxAbsoluteDelta(NewMax: TNeuralFloat = 0.1): TNeuralFloat; procedure ClearInertia(); {$IFDEF Release} inline; {$ENDIF} + procedure ClearBias(); {$IFDEF Release} inline; {$ENDIF} {$IFDEF OpenCL} procedure DisableOpenCL(); @@ -1525,22 +1612,27 @@ TNNet = class(TMObject) // Load architecture from string procedure LoadStructureFromString(strData: string); - // Save both architecture and weights to string (complete saving). + // Save both architecture and weights to string. + // You can use SaveToString to store a trained NN to a string. function SaveToString(): string; - // Save both architecture and weights to file (complete saving). + // Save both architecture and weights to file. + // You can use SaveToFile to store a trained NN to a file. procedure SaveToFile(filename: string); - // Save both architecture and weights from string (complete saving). + // Load both architecture and weights from string. + // You can use LoadFromString to load a trained NN from a string. procedure LoadFromString(strData: string); - // Load both architecture and weights from file (complete saving). + // Load both architecture and weights from file. + // You can use LoadFromFile to load a trained NN from a file. procedure LoadFromFile(filename: string); procedure LoadFromStream(stream : TStream); - // Returns a cloned neural network + // Clones the neural network and returns the clone. function Clone(): TNNet; - // deprecated + // deprecated - do not use it. procedure MulWeightsGlorotBengio(V:TNeuralFloat); deprecated; + // deprecated - do not use it. 
procedure MulWeightsHe(V:TNeuralFloat); deprecated; // custom layers support @@ -1898,6 +1990,94 @@ procedure RebuildNeuronListOnPreviousPatterns end; end; +{ TNNetGroupedPointwiseConvHardSwish } +constructor TNNetGroupedPointwiseConvHardSwish.Create(pNumFeatures, + pGroups: integer; pSuppressBias: integer); +begin + inherited Create(pNumFeatures, pGroups, pSuppressBias); + FActivationFn := @HardSwish; + FActivationFnDerivative := @HardSwishDerivative; +end; + +{ TNNetHardSwish } + +procedure TNNetHardSwish.Compute(); +var + SizeM1: integer; + LocalPrevOutput: TNNetVolume; + OutputCnt: integer; + StartTime: double; + x: TNeuralFloat; +begin + StartTime := Now(); + LocalPrevOutput := FPrevLayer.Output; + SizeM1 := LocalPrevOutput.Size - 1; + + if (FOutput.Size = FOutputError.Size) and (FOutputErrorDeriv.Size = FOutput.Size) then + begin + for OutputCnt := 0 to SizeM1 do + begin + x := LocalPrevOutput.FData[OutputCnt]; + if x > 3 then + begin + FOutput.FData[OutputCnt] := x; + FOutputErrorDeriv.FData[OutputCnt] := 1; + end + else if x < -3 then + begin + FOutput.FData[OutputCnt] := 0; + FOutputErrorDeriv.FData[OutputCnt] := 0; + end + else + begin + FOutput.FData[OutputCnt] := x*(x + 3)/6; + FOutputErrorDeriv.FData[OutputCnt] := 0.3333*x + 0.5; + end; + end; + end + else + begin + // can't calculate error on input layers. 
+ for OutputCnt := 0 to SizeM1 do + begin + x := LocalPrevOutput.FData[OutputCnt]; + if x > 3 then + begin + FOutput.FData[OutputCnt] := x; + end + else if x < -3 then + begin + FOutput.FData[OutputCnt] := 0; + end + else + begin + FOutput.FData[OutputCnt] := x*(x + 3)/6; + end; + end; + end; + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +{ TNNetConvolutionHardSwish } + +constructor TNNetConvolutionHardSwish.Create(pNumFeatures, pFeatureSize, + pInputPadding, pStride: integer; pSuppressBias: integer); +begin + inherited Create(pNumFeatures, pFeatureSize, pInputPadding, pStride, pSuppressBias); + FActivationFn := @HardSwish; + FActivationFnDerivative := @HardSwishDerivative; +end; + +{ TNNetConvolutionSwish } + +constructor TNNetConvolutionSwish.Create(pNumFeatures, pFeatureSize, + pInputPadding, pStride: integer; pSuppressBias: integer); +begin + inherited Create(pNumFeatures, pFeatureSize, pInputPadding, pStride, pSuppressBias); + FActivationFn := @Swish; + FActivationFnDerivative := @SwishDerivative; +end; + { TNNetScaleLearning } procedure TNNetScaleLearning.Compute(); @@ -2274,7 +2454,15 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); LocalDestPtr := nil; // Debug code: FOutputError.ForceMaxAbs(1); GroupDSize := OutputError.Depth div FStruct[5]; - LocalPrevError := FPrevLayer.OutputError; + if FPadding > 0 then + begin + FPrevLayerErrorPadded.Fill(0); + LocalPrevError := FPrevLayerErrorPadded; + end + else + begin + LocalPrevError := FPrevLayer.OutputError; + end; //PrevNumElements := (FSizeXDepth div 4) * 4; //PrevMissedElements := FSizeXDepth - PrevNumElements; NeuronWeights := FArrNeurons[0].Delta.Size; @@ -2284,7 +2472,7 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); MissedElements := NeuronWeights - localNumElements; for OutputY := 0 to MaxY do begin - PrevY := (OutputY*FStride)-FPadding; + PrevY := (OutputY*FStride); for TileXCnt := 0 to FMaxTileX do begin StartTileX := TileXCnt * FTileSizeX; @@ -2297,9 
+2485,9 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); begin for OutputX := StartTileX to EndTileX do begin - PrevX := (OutputX*FStride)-FPadding; + PrevX := (OutputX*FStride); CanBackpropOnPos := - (PrevX >= 0) and (PrevY >= 0) and + //(PrevX >= 0) and (PrevY >= 0) and (PrevX < FMaxPrevX) and (PrevY < FMaxPrevY); OutputRawPos := FOutputErrorDeriv.GetRawPos(OutputX, OutputY, StartTileD); @@ -2403,6 +2591,7 @@ procedure TNNetGroupedConvolutionLinear.SetPrevLayer(pPrevLayer: TNNetLayer); GroupDSize: integer; OutputD: integer; GroupId, GroupDStart: integer; + LocalPrevError: TNNetVolume; begin inherited SetPrevLayer(pPrevLayer); FVectorSize := FFeatureSizeX*FFeatureSizeY*(pPrevLayer.Output.Depth div FStruct[5]); @@ -2420,8 +2609,18 @@ procedure TNNetGroupedConvolutionLinear.SetPrevLayer(pPrevLayer: TNNetLayer); FArrGroupId[OutputD] := GroupId; FArrGroupIdStart[OutputD] := GroupDStart; end; - FMaxPrevX := 1 + FPrevLayer.FOutput.SizeX - FFeatureSizeX; - FMaxPrevY := 1 + FPrevLayer.FOutput.SizeY - FFeatureSizeY; + + if FPadding > 0 then + begin + LocalPrevError := FPrevLayerErrorPadded; + end + else + begin + LocalPrevError := FPrevLayer.OutputError; + end; + + FMaxPrevX := 1 + LocalPrevError.SizeX - FFeatureSizeX; + FMaxPrevY := 1 + LocalPrevError.SizeY - FFeatureSizeY; end; constructor TNNetGroupedConvolutionLinear.Create(pNumFeatures, pFeatureSize, @@ -2464,7 +2663,8 @@ procedure TNNetGroupedConvolutionLinear.Compute(); FSizeXDepth := FFeatureSizeX * FInputCopy.Depth div FStruct[5]; FSizeXDepthBytes := FSizeXDepth * SizeOf(TNeuralFloat); - FPrevSizeXDepthBytes := FPrevLayer.Output.IncYSizeBytes(); + + RefreshPrevSizeXDepthBytes(); PrepareInputForGroupedConvolutionFast(); @@ -3752,18 +3952,28 @@ procedure TNNetDepthwiseConv.BackpropagateCPU(); bCanBackPropagate := (FPrevLayer.OutputError.Depth = FArrNeurons[0].Weights.Depth) and (FPrevLayer.OutputError.Size = FPrevLayer.Output.Size); + if FPadding > 0 then + begin + FPrevLayerErrorPadded.Fill(0); + end; 
for CntY := 0 to MaxY do begin - PrevY := (CntY*FStride)-FPadding; + PrevY := (CntY*FStride); for CntX := 0 to MaxX do begin - PrevX := (CntX*FStride)-FPadding; + PrevX := (CntX*FStride); for NeuronIdx := 0 to MaxNeuronIdx do begin BackpropagateAtOutputPos(CntX, CntY, NeuronIdx, PrevX, PrevY, bCanBackPropagate); end; end; end; + + if FPadding > 0 then + begin + FPrevLayer.OutputError.AddArea(0, 0, FPadding, FPadding, FPrevLayer.OutputError.SizeX, FPrevLayer.OutputError.SizeY, FPrevLayerErrorPadded); + end; + //Write('Error:');FOutputError.PrintDebug();WriteLn; //Write('Error Deriv:');FOutputErrorDeriv.PrintDebug();WriteLn; if (not FBatchUpdate) then @@ -3796,7 +4006,15 @@ procedure TNNetDepthwiseConv.BackpropagateCPUFast(); MaxFeatureX := FFeatureSizeX - 1; MaxFeatureY := FFeatureSizeY - 1; MaxNeuronIdx := FNeurons.Count - 1; - LocalPrevError := FPrevLayer.OutputError; + if FPadding > 0 then + begin + FPrevLayerErrorPadded.Fill(0); + LocalPrevError := FPrevLayerErrorPadded; + end + else + begin + LocalPrevError := FPrevLayer.OutputError; + end; LocalPrevSizeX := LocalPrevError.SizeX; LocalPrevSizeY := LocalPrevError.SizeY; bCanBackPropagate := @@ -3814,10 +4032,10 @@ procedure TNNetDepthwiseConv.BackpropagateCPUFast(); LocalWeight := FArrNeurons[0].Weights; for OutputY := 0 to MaxY do begin - PrevY := (OutputY*FStride)-FPadding; + PrevY := (OutputY*FStride); for OutputX := 0 to MaxX do begin - PrevX := (OutputX*FStride)-FPadding; + PrevX := (OutputX*FStride); OutputErrorDerivLearningPtr := FOutputError.GetRawPtr(OutputX, OutputY); OutputErrorDerivPtr := FOutputErrorDeriv.GetRawPtr(OutputX, OutputY); {$IFDEF Debug} @@ -3878,10 +4096,10 @@ procedure TNNetDepthwiseConv.BackpropagateCPUFast(); begin for OutputY := 0 to MaxY do begin - PrevY := (OutputY*FStride)-FPadding; + PrevY := (OutputY*FStride); for OutputX := 0 to MaxX do begin - PrevX := (OutputX*FStride)-FPadding; + PrevX := (OutputX*FStride); for NeuronIdx := 0 to MaxNeuronIdx do begin LocalDelta := 
FArrNeurons[NeuronIdx].Delta; @@ -3931,6 +4149,11 @@ procedure TNNetDepthwiseConv.BackpropagateCPUFast(); end; end; + if FPadding > 0 then + begin + FPrevLayer.OutputError.AddArea(0, 0, FPadding, FPadding, FPrevLayer.OutputError.SizeX, FPrevLayer.OutputError.SizeY, FPrevLayerErrorPadded); + end; + //Write('Error:');FOutputError.PrintDebug();WriteLn; //Write('Error Deriv:');FOutputErrorDeriv.PrintDebug();WriteLn; if (not FBatchUpdate) then @@ -3958,7 +4181,14 @@ procedure TNNetDepthwiseConv.BackpropagateAtOutputPos(OutputX, OutputY, LocalDepth := LocalWeight.Depth * NeuronIdx; OutputErrorDerivLearningPtr := FOutputError.GetRawPtr(OutputX, OutputY, LocalDepth); OutputErrorDerivPtr := FOutputErrorDeriv.GetRawPtr(OutputX, OutputY, LocalDepth); - LocalPrevError := FPrevLayer.OutputError; + if FPadding > 0 then + begin + LocalPrevError := FPrevLayerErrorPadded; + end + else + begin + LocalPrevError := FPrevLayer.OutputError; + end; LocalPrevSizeX := LocalPrevError.SizeX; LocalPrevSizeY := LocalPrevError.SizeY; {$IFDEF Debug} @@ -4159,7 +4389,7 @@ procedure TNNetDepthwiseConv.Compute(); else FInputCopy := FPrevLayer.Output; FSizeXDepth := FFeatureSizeX * FInputCopy.Depth; FSizeXDepthBytes := FSizeXDepth * SizeOf(TNeuralFloat); - FPrevSizeXDepthBytes := FPrevLayer.Output.IncYSizeBytes(); + RefreshPrevSizeXDepthBytes(); ComputeCPUFast(); FForwardTime := FForwardTime + (Now() - StartTime); end @@ -4184,6 +4414,7 @@ procedure TNNetDepthwiseConv.Backpropagate(); FOutputError.Copy(FOutputErrorDeriv); FOutputError.Mul(-FLearningRate); BackpropagateCPUFast(); + //BackpropagateCPU(); end else begin @@ -6461,6 +6692,12 @@ procedure CompareNNStructure(NN, NN2: TNNet); WriteLn('Error: weight sum doesn''t match on layer:',I); AllGood := False; end; + + if NN.Layers[I].GetBiasSum() <> NN2.Layers[I].GetBiasSum() then + begin + WriteLn('Error: bias sum doesn''t match on layer:',I); + AllGood := False; + end; end; if AllGood then WriteLn('Structural testing has passed.'); @@ -7994,14 
+8231,6 @@ constructor TNNetConvolutionSwish6.Create(pNumFeatures, pFeatureSize, FActivationFnDerivative := @Swish6Derivative; end; -constructor TNNetConvolutionSwish.Create(pNumFeatures, pFeatureSize, - pInputPadding, pStride, pSuppressBias: integer); -begin - inherited Create(pNumFeatures, pFeatureSize, pInputPadding, pStride, pSuppressBias); - FActivationFn := @SwishUnit; - FActivationFnDerivative := @SwishDerivative; -end; - { TNNetPoolBase } procedure TNNetPoolBase.SetPrevLayer(pPrevLayer: TNNetLayer); var @@ -8347,6 +8576,22 @@ procedure TNNetConvolutionAbstract.SetPrevLayer(pPrevLayer: TNNetLayer); RefreshCalculatePrevLayerError(); FOutputSizeX := CalcOutputSize(pPrevLayer.Output.SizeX, FFeatureSizeX, FPadding, FStride); FOutputSizeY := CalcOutputSize(pPrevLayer.Output.SizeY, FFeatureSizeY, FPadding, FStride); + if FPadding > 0 then + begin + FPrevLayerErrorPadded.ReSize( pPrevLayer.OutputError.SizeX + FPadding*2, pPrevLayer.OutputError.SizeY + FPadding*2, pPrevLayer.OutputError.Depth ); + end; +end; + +procedure TNNetConvolutionAbstract.RefreshPrevSizeXDepthBytes(); +begin + if FPadding > 0 then + begin + FPrevSizeXDepthBytes := FPrevLayerErrorPadded.IncYSizeBytes(); + end + else + begin + FPrevSizeXDepthBytes := FPrevLayer.Output.IncYSizeBytes(); + end; end; function TNNetConvolutionAbstract.CalcOutputSize(pInputSize, pFeatureSize, pInputPadding, @@ -8412,7 +8657,15 @@ procedure TNNetConvolution.BackpropagateAtOutputPos(pCanBackpropOnPos: boolean; if (FCalculatePrevLayerError) then begin LocalWeight := FArrNeurons[OutputD].Weights; - LocalPrevError := FPrevLayer.OutputError; + if FPadding > 0 then + begin + LocalPrevError := FPrevLayerErrorPadded; + end + else + begin + LocalPrevError := FPrevLayer.OutputError; + end; + if FPointwise then begin LocalDestPtr := LocalPrevError.GetRawPtr(OutputX, OutputY); @@ -8634,7 +8887,8 @@ procedure TNNetConvolution.Compute(); FSizeXDepth := FFeatureSizeX * FInputCopy.Depth; FSizeXDepthBytes := FSizeXDepth * 
SizeOf(TNeuralFloat); - FPrevSizeXDepthBytes := FPrevLayer.Output.IncYSizeBytes(); + + RefreshPrevSizeXDepthBytes(); //FInputPrepared.ReSize(FOutput.SizeX, FOutput.SizeY, FInputCopy.Depth * FFeatureSizeX * FFeatureSizeY); PrepareInputForConvolutionFast(); @@ -8708,16 +8962,20 @@ procedure TNNetConvolution.BackpropagateCPU(); MaxX := OutputError.SizeX - 1; MaxY := OutputError.SizeY - 1; MaxD := OutputError.Depth - 1; + if FPadding > 0 then + begin + FPrevLayerErrorPadded.Fill(0); + end; for CntY := 0 to MaxY do begin - PrevY := (CntY*FStride)-FPadding; + PrevY := (CntY*FStride); for CntX := 0 to MaxX do begin - PrevX := (CntX*FStride)-FPadding; + PrevX := (CntX*FStride); OutputRawPos := FOutputErrorDeriv.GetRawPos(CntX, CntY); CanBackpropOnPos := - (PrevX >= 0) and (PrevY >= 0) and + //(PrevX >= 0) and (PrevY >= 0) and (PrevX < 1 + FPrevLayer.FOutputError.SizeX - FFeatureSizeX) and (PrevY < 1 + FPrevLayer.FOutputError.SizeY - FFeatureSizeY); for NeuronIdx := 0 to MaxD do @@ -8728,6 +8986,11 @@ procedure TNNetConvolution.BackpropagateCPU(); end; end; + if FPadding > 0 then + begin + FPrevLayer.OutputError.AddArea(0, 0, FPadding, FPadding, FPrevLayer.OutputError.SizeX, FPrevLayer.OutputError.SizeY, FPrevLayerErrorPadded); + end; + if (not FBatchUpdate) then begin for NeuronIdx := 0 to MaxD do FNeurons[NeuronIdx].UpdateWeights(FInertia); @@ -8763,9 +9026,17 @@ procedure TNNetConvolution.BackpropagateFastCPU(); MaxY := OutputError.SizeY - 1; MaxD := OutputError.Depth - 1; LocalDestPtr := nil; - MaxPrevX := 1 + FPrevLayer.FOutputError.SizeX - FFeatureSizeX; - MaxPrevY := 1 + FPrevLayer.FOutputError.SizeY - FFeatureSizeY; - LocalPrevError := FPrevLayer.OutputError; + if FPadding > 0 then + begin + FPrevLayerErrorPadded.Fill(0); + LocalPrevError := FPrevLayerErrorPadded; + end + else + begin + LocalPrevError := FPrevLayer.OutputError; + end; + MaxPrevX := 1 + LocalPrevError.SizeX - FFeatureSizeX; + MaxPrevY := 1 + LocalPrevError.SizeY - FFeatureSizeY; // PrevNumElements := 
(FSizeXDepth div 4) * 4; // PrevMissedElements := FSizeXDepth - PrevNumElements; // NeuronWeights := FArrNeurons[0].Delta.Size; @@ -8776,16 +9047,16 @@ procedure TNNetConvolution.BackpropagateFastCPU(); begin for OutputY := 0 to MaxY do begin - PrevY := (OutputY*FStride)-FPadding; + PrevY := (OutputY*FStride); for OutputX := 0 to MaxX do begin - PrevX := (OutputX*FStride)-FPadding; + PrevX := (OutputX*FStride); OutputRawPos := FOutputErrorDeriv.GetRawPos(OutputX, OutputY); //TODO: the next line is probably wrong. - if (FCalculatePrevLayerError) then LocalDestPtr := LocalPrevError.GetRawPtr(OutputX, OutputY); + if (FCalculatePrevLayerError) then LocalDestPtr := LocalPrevError.GetRawPtr(OutputX, OutputY); PtrPreparedInput := FInputPrepared.GetRawPtr(OutputX, OutputY); CanBackpropOnPos := - (PrevX >= 0) and (PrevY >= 0) and + //(PrevX >= 0) and (PrevY >= 0) and (PrevX < MaxPrevX) and (PrevY < MaxPrevY); for OutputD := 0 to MaxD do @@ -8905,6 +9176,11 @@ procedure TNNetConvolution.BackpropagateFastCPU(); end; end; + if FPadding > 0 then + begin + FPrevLayer.OutputError.AddArea(0, 0, FPadding, FPadding, FPrevLayer.OutputError.SizeX, FPrevLayer.OutputError.SizeY, FPrevLayerErrorPadded); + end; + if (not FBatchUpdate) then begin for OutputD := 0 to MaxD do FArrNeurons[OutputD].UpdateWeights(FInertia); @@ -8941,9 +9217,18 @@ procedure TNNetConvolution.BackpropagateFastTiledCPU(); MaxY := OutputError.SizeY - 1; MaxD := OutputError.Depth - 1; LocalDestPtr := nil; - MaxPrevX := 1 + FPrevLayer.FOutputError.SizeX - FFeatureSizeX; - MaxPrevY := 1 + FPrevLayer.FOutputError.SizeY - FFeatureSizeY; - LocalPrevError := FPrevLayer.OutputError; + if FPadding > 0 then + begin + FPrevLayerErrorPadded.Fill(0); + LocalPrevError := FPrevLayerErrorPadded; + end + else + begin + LocalPrevError := FPrevLayer.OutputError; + end; + MaxPrevX := 1 + LocalPrevError.SizeX - FFeatureSizeX; + MaxPrevY := 1 + LocalPrevError.SizeY - FFeatureSizeY; + PrevNumElements := (FSizeXDepth div 4) * 4; 
PrevMissedElements := FSizeXDepth - PrevNumElements; NeuronWeights := FArrNeurons[0].Delta.Size; @@ -8953,7 +9238,7 @@ procedure TNNetConvolution.BackpropagateFastTiledCPU(); LocalLearningErrorDerivPtr := Addr(LocalLearningErrorDeriv); for OutputY := 0 to MaxY do begin - PrevY := (OutputY*FStride)-FPadding; + PrevY := (OutputY*FStride); for TileXCnt := 0 to FMaxTileX do begin StartTileX := TileXCnt * FTileSizeX; @@ -8966,10 +9251,10 @@ procedure TNNetConvolution.BackpropagateFastTiledCPU(); begin for OutputX := StartTileX to EndTileX do begin - PrevX := (OutputX*FStride)-FPadding; + PrevX := (OutputX*FStride); PtrPreparedInput := FInputPrepared.GetRawPtr(OutputX, OutputY); CanBackpropOnPos := - (PrevX >= 0) and (PrevY >= 0) and + //(PrevX >= 0) and (PrevY >= 0) and (PrevX < MaxPrevX) and (PrevY < MaxPrevY); if (FCalculatePrevLayerError and CanBackpropOnPos) then LocalDestPtr := LocalPrevError.GetRawPtr(PrevX, PrevY); @@ -9093,6 +9378,11 @@ procedure TNNetConvolution.BackpropagateFastTiledCPU(); end; end; + if FPadding > 0 then + begin + FPrevLayer.OutputError.AddArea(0, 0, FPadding, FPadding, FPrevLayer.OutputError.SizeX, FPrevLayer.OutputError.SizeY, FPrevLayerErrorPadded); + end; + if (not FBatchUpdate) then begin for OutputD := 0 to MaxD do FArrNeurons[OutputD].UpdateWeights(FInertia); @@ -9302,14 +9592,20 @@ constructor TNNetConvolutionAbstract.Create(pFeatureSize, pInputPadding, pStride FPadding := pInputPadding; FStride := Max(pStride,1); FSuppressBias := pSuppressBias; - if FPadding > 0 - then FInputCopy := TNNetVolume.Create; + if FPadding > 0 then + begin + FInputCopy := TNNetVolume.Create; + FPrevLayerErrorPadded := TNNetVolume.Create; + end; end; destructor TNNetConvolutionAbstract.Destroy(); begin - if FPadding > 0 - then FInputCopy.Free; + if FPadding > 0 then + begin + FInputCopy.Free; + FPrevLayerErrorPadded.Free; + end; inherited Destroy(); end; @@ -9937,6 +10233,7 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; if S[0] = 
'TNNetIdentityWithoutBackprop' then Result := TNNetIdentityWithoutBackprop.Create() else if S[0] = 'TNNetReLU' then Result := TNNetReLU.Create() else if S[0] = 'TNNetSwish' then Result := TNNetSwish.Create() else + if S[0] = 'TNNetHardSwish' then Result := TNNetHardSwish.Create() else if S[0] = 'TNNetSwish6' then Result := TNNetSwish6.Create() else if S[0] = 'TNNetReLUSqrt' then Result := TNNetReLUSqrt.Create() else if S[0] = 'TNNetReLUL' then Result := TNNetReLUL.Create(St[0], St[1], St[2]) else @@ -9969,11 +10266,13 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; if S[0] = 'TNNetConvolutionGeLU' then Result := TNNetConvolutionGeLU.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetConvolutionSwish6' then Result := TNNetConvolutionSwish6.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetConvolutionSwish' then Result := TNNetConvolutionSwish.Create(St[0], St[1], St[2], St[3], St[4]) else + if S[0] = 'TNNetConvolutionHardSwish' then Result := TNNetConvolutionHardSwish.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetConvolutionLinear' then Result := TNNetConvolutionLinear.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetGroupedConvolutionLinear' then Result := TNNetGroupedConvolutionLinear.Create(St[0], St[1], St[2], St[3], St[5], St[4]) else if S[0] = 'TNNetGroupedConvolutionReLU' then Result := TNNetGroupedConvolutionReLU.Create(St[0], St[1], St[2], St[3], St[5], St[4]) else if S[0] = 'TNNetGroupedPointwiseConvLinear' then Result := TNNetGroupedPointwiseConvLinear.Create({pNumFeatures=}St[0], {pGroups=}St[5], {pSuppressBias=}St[4]) else if S[0] = 'TNNetGroupedPointwiseConvReLU' then Result := TNNetGroupedPointwiseConvReLU.Create({pNumFeatures=}St[0], {pGroups=}St[5], {pSuppressBias=}St[4]) else + if S[0] = 'TNNetGroupedPointwiseConvHardSwish' then Result := TNNetGroupedPointwiseConvHardSwish.Create({pNumFeatures=}St[0], {pGroups=}St[5], {pSuppressBias=}St[4]) else if S[0] = 
'TNNetConvolutionSharedWeights' then Result := TNNetConvolutionSharedWeights.Create(FLayers[St[5]]) else if S[0] = 'TNNetDepthwiseConv' then Result := TNNetDepthwiseConv.Create(St[0], St[1], St[2], St[3]) else if S[0] = 'TNNetDepthwiseConvReLU' then Result := TNNetDepthwiseConvReLU.Create(St[0], St[1], St[2], St[3]) else @@ -10132,14 +10431,21 @@ function TNNet.AddGroupedConvolution(Conv2d: TNNetConvolutionClass; EachGroupOutput: array of TNNetLayer; GroupCnt: integer; begin - PreviousLayer := GetLastLayer(); + if pInputPadding > 0 then + begin + PreviousLayer := AddLayer( TNNetPad.Create(pInputPadding) ); + end + else + begin + PreviousLayer := GetLastLayer(); + end; Result := PreviousLayer; SetLength(EachGroupOutput, Groups); FeaturesPerGroup := pNumFeatures div Groups; InputChannelsPerGroup := PreviousLayer.FOutput.Depth div Groups; if Groups = 1 then begin - Result := AddLayer( Conv2d.Create(FeaturesPerGroup, pFeatureSize, pInputPadding, pStride, pSuppressBias) ); + Result := AddLayer( Conv2d.Create(FeaturesPerGroup, pFeatureSize, {pInputPadding=}0, pStride, pSuppressBias) ); end; if Groups > 1 then begin @@ -10148,7 +10454,7 @@ function TNNet.AddGroupedConvolution(Conv2d: TNNetConvolutionClass; if ChannelInterleaving then AddLayerAfter( TNNetSplitChannelEvery.Create(Groups, GroupCnt), PreviousLayer) else AddLayerAfter( TNNetSplitChannels.Create(GroupCnt*InputChannelsPerGroup, InputChannelsPerGroup), PreviousLayer); - EachGroupOutput[GroupCnt] := AddLayer( Conv2d.Create(FeaturesPerGroup, pFeatureSize, pInputPadding, pStride, pSuppressBias) ); + EachGroupOutput[GroupCnt] := AddLayer( Conv2d.Create(FeaturesPerGroup, pFeatureSize, {pInputPadding=}0, pStride, pSuppressBias) ); end; Result := AddLayer( TNNetDeepConcat.Create(EachGroupOutput) ); end; @@ -11191,6 +11497,20 @@ procedure TNNet.ClearInertia(); end; end; +procedure TNNet.ClearBias(); +var + LayerCnt: integer; +begin + if FLayers.Count > 1 then + begin + for LayerCnt := 1 to GetLastLayerIdx() do + begin + 
FLayers[LayerCnt].ClearBias(); + FLayers[LayerCnt].AfterWeightUpdate(); + end; + end; +end; + {$IFDEF OpenCL} procedure TNNet.DisableOpenCL(); var @@ -11501,7 +11821,8 @@ procedure TNNet.DebugStructure(); IntToStr(FLayers[LayerCnt].Output.Depth), ' Learning Rate:',FLayers[LayerCnt].LearningRate:6:4, ' Inertia:',FLayers[LayerCnt].Inertia:4:2, - ' Weight Sum:', FLayers[LayerCnt].GetWeightSum():8:4 + ' Weight Sum:', FLayers[LayerCnt].GetWeightSum():8:4, + ' Bias Sum:', FLayers[LayerCnt].GetBiasSum():8:4 ); if Assigned(FLayers[LayerCnt].PrevLayer) then @@ -11842,6 +12163,9 @@ procedure TNNetLayer.SetPrevLayer(pPrevLayer: TNNetLayer); procedure TNNetLayer.ApplyActivationFunctionToOutput(); var OutputCnt, OutputMax: integer; + {$IFDEF FPC} + x: TNeuralFloat; + {$ENDIF} begin OutputMax := FOutput.Size - 1; if OutputMax >= 0 then @@ -11867,6 +12191,26 @@ procedure TNNetLayer.ApplyActivationFunctionToOutput(); FOutput.CopyRelu(FOutputRaw); end else + if FActivationFn = @HardSwish then + begin + for OutputCnt := 0 to OutputMax do + begin + x := FOutputRaw.FData[OutputCnt]; + if x > 3 then + begin + FOutput.FData[OutputCnt] := x; + end + else if x < -3 then + begin + FOutput.FData[OutputCnt] := 0; + end + else + begin + FOutput.FData[OutputCnt] := x*(x + 3)/6; + end; + end; + end + else begin for OutputCnt := 0 to OutputMax do begin @@ -12580,6 +12924,19 @@ procedure TNNetLayer.MulDeltas(V: TNeuralFloat); AfterWeightUpdate(); end; +procedure TNNetLayer.ClearBias(); +var + Cnt: integer; +begin + if FNeurons.Count > 0 then + begin + for Cnt := 0 to FNeurons.Count-1 do + begin + FNeurons[Cnt].FBiasWeight := 0; + end; + end +end; + procedure TNNetLayer.ClearInertia(); var Cnt: integer; diff --git a/neural/neuralopenclv.pas b/neural/neuralopenclv.pas index 403b5471..056c48d4 100644 --- a/neural/neuralopenclv.pas +++ b/neural/neuralopenclv.pas @@ -24,7 +24,7 @@ interface uses - Classes, SysUtils, neuralopencl, StdCtrls; + Classes, SysUtils, neuralopencl, {$IF CompilerVersion >= 23} 
VCL.StdCtrls {$ELSE} StdCtrls {$IFEND}; type { TEasyOpenCLCL } diff --git a/neural/neuralthread.pas b/neural/neuralthread.pas index bdbbd77f..cf6f5c0c 100644 --- a/neural/neuralthread.pas +++ b/neural/neuralthread.pas @@ -173,6 +173,38 @@ procedure CreateNeuralThreadListIfRequired(); end; end; +{$IF CompilerVersion <= 23} +// delphi 2010 does not define the following functions and constants +{$IFDEF MSWINDOWS} +const ALL_PROCESSOR_GROUPS = $ffff; + + // + // Structure to represent a group-specific affinity, such as that of a + // thread. Specifies the group number and the affinity within that group. + // +type + KAFFINITY = ULONG_PTR; + _GROUP_AFFINITY = record + Mask: KAFFINITY; + Group: WORD; + Reserved: array[0..2] of WORD; + end; + {$EXTERNALSYM _GROUP_AFFINITY} + GROUP_AFFINITY = _GROUP_AFFINITY; + {$EXTERNALSYM GROUP_AFFINITY} + PGROUP_AFFINITY = ^_GROUP_AFFINITY; + {$EXTERNALSYM PGROUP_AFFINITY} + TGroupAffinity = _GROUP_AFFINITY; + PGroupAffinity = PGROUP_AFFINITY; + +function GetActiveProcessorCount(GroupNumber: WORD): DWORD; stdcall; external 'kernel32.dll'; +function SetThreadGroupAffinity(hThread: THandle; var GroupAffinity: TGroupAffinity; + PreviousGroupAffinity: PGroupAffinity): ByteBool; stdcall; external kernel32 name 'SetThreadGroupAffinity'; +function GetThreadGroupAffinity(hThread: THandle; var GroupAffinity: TGroupAffinity): ByteBool; stdcall; external kernel32 name 'GetThreadGroupAffinity'; +function GetActiveProcessorGroupCount: WORD; stdcall; external kernel32 name 'GetActiveProcessorGroupCount'; +{$ENDIF} +{$IFEND} + function NeuralDefaultThreadCount: integer; begin {$IFDEF MSWINDOWS} @@ -195,10 +227,14 @@ function GetProcessId(): integer; GetProcessId := {$IFDEF WINDOWS}GetCurrentProcessId(){$ELSE}fpgetppid(){$ENDIF}; end; {$ELSE} -//TODO: properly implement process ID for delphi function GetProcessId(): integer; begin + {$IFDEF WINDOWS} + Result := GetCurrentProcessId() + {$ELSE} + //TODO: properly implement process ID for non windows delphi 
GetProcessId := Random(MaxInt); + {$ENDIF} end; {$ENDIF} diff --git a/neural/neuralvolume.pas b/neural/neuralvolume.pas index 9c209c65..2d18ba0f 100644 --- a/neural/neuralvolume.pas +++ b/neural/neuralvolume.pas @@ -41,7 +41,7 @@ interface -uses {$IFDEF FPC}fgl,{$ELSE}Contnrs,{$ENDIF} classes, sysutils; +uses {$IFDEF FPC}fgl,{$ELSE}Contnrs,Generics.Collections,{$ENDIF} classes, sysutils; {$include neuralnetwork.inc} @@ -88,8 +88,10 @@ interface { TVolume } {$IFDEF FPC} + TIntegerList = class (specialize TFPGList<integer>); generic TVolume<T> = class(TObject) {$ELSE} + TIntegerList = TList<integer>; T = TNeuralFloat; PtrInt = Integer; // This is a hack to allow compilation with other compilers @@ -201,7 +203,7 @@ TVolume = class(TObject) procedure CopyChannels(Original: TVolume; aChannels: array of integer); procedure Define(Original: array of T); function DotProduct(Original: TVolume): T; overload; {$IFDEF Release} inline; {$ENDIF} - class function DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: integer): Single; overload; + class function DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: integer): Single; overload; {$IFDEF Release} inline; {$ENDIF} class function Product(PtrA: TNeuralFloatArrPtr; NumElements: integer): Single; overload; {$IFDEF Release} inline; {$ENDIF} function SumDiff(Original: TVolume): T; {$IFDEF Release} inline; {$ENDIF} procedure DebugDiff(Original: TVolume; Limit: Single = 0); @@ -236,6 +238,12 @@ TVolume = class(TObject) procedure ClearTag(); {$IFDEF Release} inline; {$ENDIF} function NeuralToStr(V: TNeuralFloat): string; + // create lists with positions that are non zeros. 
+ procedure LoadNonZeroPosIntoTIntegerList(Ints: TIntegerList; + IncludePositive: boolean=true; IncludeNegative:boolean = true); + function CreateIntegerListWithNonZeroPos(IncludePositive: boolean=true; + IncludeNegative:boolean = true): TIntegerList; + // Color and Neuronal Weights Transformations procedure RgbImgToNeuronalInput(color_encoding: integer); procedure NeuronalInputToRgbImg(color_encoding: integer); @@ -486,6 +494,8 @@ TNNetKMeans = class(TMObject) TNNetStringList = class(TStringList) public function GetRandomIndex():integer; {$IFDEF Release} inline; {$ENDIF} + procedure KeepFirst(Cnt: integer); + procedure KeepLast(Cnt: integer); procedure DeleteFirst(Cnt: integer); procedure DeleteLast(Cnt: integer); end; @@ -508,19 +518,86 @@ TStringListInt = class(TNNetStringList) { TStringsObj } generic TStringsObj = class(TNNetStringList) private + FSortedList: boolean; function GetList(Index: Integer): TObj; {$IFDEF Release} inline; {$ENDIF} public constructor Create; function AddObject(const S: string; AObject: TObject): Integer; override; procedure FixObjects(); - procedure AddStringObj(const S: string); {$IFDEF Release} inline; {$ENDIF} + property List[Index: Integer]: TObj read GetList; + property SortedList: boolean read FSortedList write FSortedList; + end; + + TStringIntegerList = class (specialize TStringsObj); + + { TStringStringList } + + TStringStringList = class (specialize TStringsObj) + public + procedure LoadFromCsv(filename: string; + SkipFirstLine:boolean = true; + KeyId: integer = -1; + Separator: char = ','); + procedure SaveToCsv(filename: string; + Separator: char = ','); + end; + + TStringVolumeList = class (specialize TStringsObj) + public + function CreateNonZeroPositionLists(): TStringIntegerList; end; - TStringStringList = class (specialize TStringsObj); - TStringVolumeList = class (specialize TStringsObj); TStringStringListVolume = class (specialize TStringsObj); + + {$ELSE} + TStringsObj = class(TNNetStringList) + private + function 
GetList(Index: Integer): TObject; + function CreateObject: TObject; virtual; abstract; + public + constructor Create; + function AddObject(const S: string; AObject: TObject): Integer; override; + procedure FixObjects(); + + procedure AddStringObj(const S: string); + property List[Index: Integer]: TObject read GetList; + end; + + TStringIntegerList = class (TStringsObj) + private + function GetList(Index: Integer): TIntegerList; + function CreateObject: TObject; override; + public + property List[Index: Integer]: TIntegerList read GetList; + end; + + TStringStringList = class(TStringsObj) + private + function GetList(Index: Integer): TStringList; + function CreateObject: TObject; override; + public + property List[Index: Integer]: TStringList read GetList; + end; + + TStringVolumeList = class(TStringsObj) + private + function GetList(Index: Integer): TNNetVolume; + function CreateObject: TObject; override; + public + function CreateNonZeroPositionLists(): TStringIntegerList; + + property List[Index: Integer]: TNNetVolume read GetList; + end; + + TStringStringListVolume = class(TStringsObj) + private + function GetList(Index: Integer): TStringVolumeList; + function CreateObject: TObject; override; + public + property List[Index: Integer]: TStringVolumeList read GetList; + end; {$ENDIF} { TNNetDictionary } @@ -534,10 +611,18 @@ TNNetDictionary = class(TStringListInt) function AddWordToDictionary(pWord:string): boolean; function AddWordsToDictionary(pString:string): boolean; - + procedure AddWordFromCsvField(filename: string; fieldId: integer; + SkipFirstLine: boolean = True; Separator:char = ','); + procedure RemoveAllStringsWithLessThen(I:integer); function WordToIndex(pWord:string): integer; procedure StringToVolume(pString: string; Volume: TNNetVolume); function VolumeToString(Volume: TNNetVolume; Threshold: TNeuralFloat = 0.2): string; + procedure CsvToTStringVolumeList(filename: string; + GroupByFieldId, DataFieldId: integer; SVL: TStringVolumeList; + 
SkipFirstLine: boolean = True; Separator:char = ','); + procedure PrintDebug(FirstElements: integer); + procedure SaveDictionaryToFile(Filename: string; Separator:char = ','); + procedure LoadDictionaryFromFile(Filename: string; Separator:char = ','); end; function CreateTokenizedStringList(str: string; c:char):TStringList; overload; @@ -549,6 +634,9 @@ TNNetDictionary = class(TStringListInt) function RectifiedLinearUnit(x: TNeuralFloat): TNeuralFloat; function RectifiedLinearUnitDerivative(x: TNeuralFloat): TNeuralFloat; + function HardSwish(x: TNeuralFloat): TNeuralFloat; + function HardSwishDerivative(x: TNeuralFloat): TNeuralFloat; + function RectifiedLinearUnitLeaky(x: TNeuralFloat): TNeuralFloat; function RectifiedLinearUnitLeakyDerivative(x: TNeuralFloat): TNeuralFloat; @@ -560,7 +648,7 @@ TNNetDictionary = class(TStringListInt) function Swish6Unit(x : TNeuralFloat) : TNeuralFloat; function Swish6Derivative(x : TNeuralFloat) : TNeuralFloat; - function SwishUnit(x : TNeuralFloat) : TNeuralFloat; + function Swish(x: TNeuralFloat): TNeuralFloat; function SwishDerivative(x : TNeuralFloat) : TNeuralFloat; function Sigmoid(x: TNeuralFloat): TNeuralFloat; @@ -1215,7 +1303,7 @@ function NeuralFloatToStr(V: TNeuralFloat): string; Result := FloatToStr(V,LocalFormatSettings); end; -function NeuralStrToFloat(V: string): TNeuralFloat; +function NeuralStrToFloat(V: String): TNeuralFloat; var LocalFormatSettings: TFormatSettings; begin @@ -1578,7 +1666,7 @@ function Swish6Derivative(x : TNeuralFloat) : TNeuralFloat; end; end; -function SwishUnit(x : TNeuralFloat) : TNeuralFloat; +function Swish(x: TNeuralFloat): TNeuralFloat; begin if x < -6 then begin @@ -1586,7 +1674,7 @@ function SwishUnit(x : TNeuralFloat) : TNeuralFloat; end else if x < 6 then begin - Result := x* (1 / ( 1 + Exp(-x) )); + Result := x/( 1 + Exp(-x) ); end else begin @@ -1615,6 +1703,123 @@ function SwishDerivative(x : TNeuralFloat) : TNeuralFloat; end; end; +// 
https://paperswithcode.com/method/hard-swish +function HardSwish(x: TNeuralFloat): TNeuralFloat; +begin + if x > 3 then + begin + Result := x; + end + else if x < -3 then + begin + Result := 0; + end + else + begin + Result := x*(x + 3)/6; + end; +end; + +function HardSwishDerivative(x: TNeuralFloat): TNeuralFloat; +begin + if x<-3 then + begin + Result := 0; + end + else if x>3 then + begin + Result := 1; + end + else + begin + Result := 0.3333*x + 0.5; + end; +end; + +{$IFDEF FPC} +{ TStringStringList } + +procedure TStringStringList.LoadFromCsv(filename: string; + SkipFirstLine:boolean = true; + KeyId: integer = -1; + Separator: char = ','); +var + Sep: TStringList; + CurrentLine: string; + KeyStr: string; + FileHandler: TextFile; + LineCnt: integer; +begin + Self.Sorted := false; + Self.SortedList := false; + AssignFile(FileHandler, filename); + Reset(FileHandler); + LineCnt := 0; + while (not Eof(FileHandler)) do // and (LineCnt<10000) + begin + ReadLn(FileHandler, CurrentLine); + if not( (LineCnt = 0) and (SkipFirstLine) ) then + begin + Sep := CreateTokenizedStringList(Separator); + Sep.DelimitedText := CurrentLine; + if (KeyId = -1) then + begin + KeyStr := IntToStr(LineCnt); + end + else + begin + KeyStr := Sep[KeyId]; + end; + AddObject(KeyStr, TObject(Sep)); + end; + LineCnt := LineCnt + 1; + // debug line only: + //if LineCnt mod 100000 = 0 then WriteLn(LineCnt); + end; + CloseFile(FileHandler); +end; + +procedure TStringStringList.SaveToCsv(filename: string; + Separator: char = ','); +var + RowCnt: integer; + MaxCnt: integer; + FileHandler: TextFile; +begin + MaxCnt := Count - 1; + if MaxCnt > -1 then + begin + AssignFile(FileHandler, filename); + ReWrite(FileHandler); + for RowCnt := 0 to MaxCnt do + begin + List[RowCnt].Delimiter := Separator; + WriteLn(FileHandler, List[RowCnt].DelimitedText); + end; + CloseFile(FileHandler); + end; +end; + +{$ENDIF} + +{ TStringVolumeList } + +function TStringVolumeList.CreateNonZeroPositionLists: 
TStringIntegerList; +var + ElementCnt: integer; + MaxCnt: integer; +begin + Result := TStringIntegerList.Create; + if Count > 0 then + begin + MaxCnt := Count - 1; + for ElementCnt := 0 to MaxCnt do + begin + Result.AddObject(Self[ElementCnt], Self.List[ElementCnt].CreateIntegerListWithNonZeroPos() ); + end; + end; +end; + constructor TNNetVolumePair.Create(); begin inherited Create(); @@ -1661,6 +1866,16 @@ function TNNetStringList.GetRandomIndex(): integer; end; end; +procedure TNNetStringList.KeepFirst(Cnt: integer); +begin + DeleteLast(Count-Cnt); +end; + +procedure TNNetStringList.KeepLast(Cnt: integer); +begin + DeleteFirst(Count-Cnt); +end; + procedure TNNetStringList.DeleteFirst(Cnt: integer); var I: integer; @@ -1701,6 +1916,7 @@ constructor TStringsObj.Create; inherited Create; Self.OwnsObjects := true; Self.Sorted := true; + Self.FSortedList := true; end; function TStringsObj.AddObject(const S: string; AObject: TObject): Integer; @@ -1710,7 +1926,7 @@ function TStringsObj.AddObject(const S: string; AObject: TObject): Integer; AObject := TObj.Create; end; - if AObject is TStringList then + if (FSortedList) and (AObject is TStringList) then begin TStringList(AObject).Sorted := true; end; @@ -1731,7 +1947,7 @@ procedure TStringsObj.FixObjects(); Self.Objects[ElementId] := TObj.Create; end; - if Self.Objects[ElementId] is TStringList then + if (FSortedList) and (Self.Objects[ElementId] is TStringList) then begin TStringList(Self.Objects[ElementId]).Sorted := true; end; @@ -1743,6 +1959,105 @@ procedure TStringsObj.AddStringObj(const S: string); begin Self.AddObject(S, TObj.Create); end; +{$ELSE} +function TStringsObj.GetList(Index: Integer): TObject; +begin + Result := Self.Objects[Index]; +end; + +constructor TStringsObj.Create; +begin + inherited Create; + Self.OwnsObjects := true; + Self.Sorted := true; +end; + +function TStringsObj.AddObject(const S: string; AObject: TObject): Integer; +begin + if not Assigned(AObject) then + begin + AObject := 
CreateObject; + end; + + if AObject is TStringList then + begin + TStringList(AObject).Sorted := true; + end; + + Result := inherited AddObject(S, AObject); +end; + +procedure TStringsObj.FixObjects(); +var + ElementId: integer; +begin + if Count > 0 then + begin + for ElementId := 0 to Count - 1 do + begin + if not Assigned(Self.List[ElementId]) then + begin + Self.Objects[ElementId] := CreateObject; + end; + + if Self.Objects[ElementId] is TStringList then + begin + TStringList(Self.Objects[ElementId]).Sorted := true; + end; + end; + end; +end; + +procedure TStringsObj.AddStringObj(const S: string); +begin + Self.AddObject(S, CreateObject); +end; + +{ TStringStringList } +function TStringStringList.CreateObject: TObject; +begin + Result := TStringList.Create(); +end; + +function TStringStringList.GetList(Index: Integer): TStringList; +begin + Result := TStringList(inherited GetList(Index) ); +end; + +{ TStringVolumeList } +function TStringVolumeList.CreateObject: TObject; +begin + Result := TNNetVolume.Create(); +end; + +function TStringVolumeList.GetList(Index: Integer): TNNetVolume; +begin + Result := TNNetVolume(inherited GetList(Index) ); +end; + +{ TStringStringListVolume } +function TStringStringListVolume.CreateObject: TObject; +begin + Result := TStringVolumeList.Create; +end; + +function TStringStringListVolume.GetList(Index: Integer): TStringVolumeList; +begin + Result := TStringVolumeList(inherited GetList(Index) ); +end; + +{ TStringIntegerList } + +function TStringIntegerList.CreateObject: TObject; +begin + Result := TIntegerList.Create(); +end; + +function TStringIntegerList.GetList(Index: Integer): TIntegerList; +begin + Result := TIntegerList(inherited GetList(Index) ); +end; + {$ENDIF} { TStringListInt } @@ -1838,6 +2153,67 @@ function TNNetDictionary.AddWordsToDictionary(pString: string): boolean; end; end; +procedure TNNetDictionary.AddWordFromCsvField(filename: string; fieldId: integer + ; SkipFirstLine: boolean = True; Separator:char = ','); 
+var + Sep: TStringList; + CurrentLine: string; + WordToAdd: string; + FileHandler: TextFile; + LineCnt: integer; +begin + Sep := CreateTokenizedStringList(Separator); + AssignFile(FileHandler, filename); + Reset(FileHandler); + LineCnt := 0; + while not Eof(FileHandler) do + begin + ReadLn(FileHandler, CurrentLine); + if not( (LineCnt = 0) and (SkipFirstLine) ) then + begin + Sep.DelimitedText := CurrentLine; + if Sep.Count > fieldId then + begin + WordToAdd := Sep[fieldId]; + {$IFDEF FPC} + AddWordToDictionary(TrimSet(WordToAdd,['"',' '])); + {$ELSE} + AddWordToDictionary(Trim(WordToAdd)); + {$ENDIF} + end; + end; + LineCnt := LineCnt + 1; + //Debug line: + //if LineCnt mod 100000 = 0 then WriteLn(LineCnt); + end; + CloseFile(FileHandler); + Sep.Free; +end; + +procedure TNNetDictionary.RemoveAllStringsWithLessThen(I: integer); +var + MaxPos, CurrentPos: integer; +begin + MaxPos := Count - 1; + if MaxPos > -1 then + begin + Self.Sorted := false; + Self.SortByIntegerDesc; + CurrentPos := 0; + while CurrentPos <= MaxPos do + begin + if Self.Integers[CurrentPos] < I then + begin + Self.KeepFirst(CurrentPos); + MaxPos := -1; // exit the while loop + end; + CurrentPos := CurrentPos + 1; + end; + Self.Sort; + Self.Sorted := true; + end; +end; + function TNNetDictionary.WordToIndex(pWord: string): integer; begin if not(Self.Find(pWord, Result)) then Result := -1; @@ -1895,6 +2271,133 @@ function TNNetDictionary.VolumeToString(Volume: TNNetVolume; Result := FTokenizer.DelimitedText; end; +procedure TNNetDictionary.CsvToTStringVolumeList(filename: string; + GroupByFieldId, DataFieldId: integer; SVL: TStringVolumeList; + SkipFirstLine: boolean = True; Separator:char = ','); +var + Sep: TStringList; + CurrentLine: string; + KeyStr, DataStr: string; + DataId, KeyId: integer; + FileHandler: TextFile; + LineCnt: integer; + V: TNNetVolume; +begin + Sep := CreateTokenizedStringList(Separator); + AssignFile(FileHandler, filename); + Reset(FileHandler); + LineCnt := 0; + while not 
Eof(FileHandler) do + begin + ReadLn(FileHandler, CurrentLine); + if not( (LineCnt = 0) and (SkipFirstLine) ) then + begin + Sep.DelimitedText := CurrentLine; + if (Sep.Count > GroupByFieldId) and (Sep.Count > DataFieldId) then + begin + KeyStr := Sep[GroupByFieldId]; + DataStr := Sep[DataFieldId]; + DataId := IndexOf(DataStr); + if DataId > -1 then + begin + KeyId := SVL.IndexOf(KeyStr); + if KeyId > -1 then + begin + V := SVL.List[KeyId]; + V.FData[DataId] := 1; + end + else + begin + V := TNNetVolume.Create(Count); + V.FData[DataId] := 1; + SVL.AddObject(KeyStr, V); + end; + end; + end; + end; + LineCnt := LineCnt + 1; + // debug line only: + //if LineCnt mod 100000 = 0 then WriteLn(LineCnt); + end; + CloseFile(FileHandler); + Sep.Free; +end; + +procedure TNNetDictionary.PrintDebug(FirstElements: integer); +var + ElementCnt: integer; +begin + WriteLn('Number of elements: ', Count); + if Count > 0 then + begin + if FirstElements > Count then FirstElements := Count; + WriteLn('Showing first ',FirstElements,' elements.'); + for ElementCnt := 0 to FirstElements - 1 do + begin + WriteLn(ElementCnt,': ',Self[ElementCnt],' -> ', Self.Integers[ElementCnt]); + end; + end; +end; + +procedure TNNetDictionary.SaveDictionaryToFile(Filename: string; Separator: char + ); +var + RowCnt: integer; + MaxCnt: integer; + FileHandler: TextFile; +begin + MaxCnt := Count - 1; + if MaxCnt > -1 then + begin + AssignFile(FileHandler, Filename); + ReWrite(FileHandler); + for RowCnt := 0 to MaxCnt do + begin + WriteLn(FileHandler, Self[RowCnt]+Separator+IntToStr(Self.Integers[RowCnt])); + end; + CloseFile(FileHandler); + end; +end; + +procedure TNNetDictionary.LoadDictionaryFromFile(Filename: string; + Separator: char); +var + Sep: TStringList; + CurrentLine: string; + Word: string; + WordCount: string; + FileHandler: TextFile; +begin + Clear; + Sep := CreateTokenizedStringList(Separator); + AssignFile(FileHandler, Filename); + Reset(FileHandler); + while not Eof(FileHandler) do + begin + 
ReadLn(FileHandler, CurrentLine); + Sep.DelimitedText := CurrentLine; + if Sep.Count = 2 then + begin + {$IFDEF Debug} + Word := Sep[0]; + WordCount := Sep[1]; + Self.AddInteger(Word,StrToInt(WordCount)); + {$ELSE} + Self.AddInteger(Sep[0],StrToInt(Sep[1])); + {$ENDIF} + end + else + begin + raise Exception.Create('Bad dictionary entry:' + CurrentLine); + end; + + // debug line only: + //if LineCnt mod 100000 = 0 then WriteLn(LineCnt); + end; + CloseFile(FileHandler); + Sep.Free; +end; + { TNNetKMeans } constructor TNNetKMeans.Create(pVolNum, pSizeX, pSizeY, pDepth: integer; pManhattan: boolean = true); begin @@ -4353,6 +4856,30 @@ function TVolume.NeuralToStr(V: TNeuralFloat): string; Result := FloatToStr(V, FFormatSettings); end; +procedure TVolume.LoadNonZeroPosIntoTIntegerList(Ints: TIntegerList; + IncludePositive: boolean=true; IncludeNegative:boolean = true); +var + I: integer; + vHigh: integer; + Value: TNeuralFloat; +begin + vHigh := High(FData); + for I := 0 to vHigh do + begin + Value := FData[I]; + if IncludePositive and (value > 0) then Ints.Add(I) + else if IncludeNegative and (value < 0) then Ints.Add(I); + end; +end; + +function TVolume.CreateIntegerListWithNonZeroPos(IncludePositive: boolean; + IncludeNegative: boolean): TIntegerList; +begin + Result := TIntegerList.Create(); + LoadNonZeroPosIntoTIntegerList(Result, IncludePositive, IncludeNegative); +end; + + procedure TVolume.RgbImgToNeuronalInput(color_encoding: integer); begin // In all color encodings, values vary from -2 to 2. 
diff --git a/neural/neuralvolumev.pas b/neural/neuralvolumev.pas index b77102d7..a2f712b2 100644 --- a/neural/neuralvolumev.pas +++ b/neural/neuralvolumev.pas @@ -24,8 +24,9 @@ interface uses - Classes, SysUtils, ExtCtrls, Graphics, neuralvolume, - {$IFDEF FPC}LCLType, FPImage {$ELSE}Windows{$ENDIF} ; + Classes, SysUtils, neuralvolume, + {$IFDEF FPC}ExtCtrls, Graphics, LCLType, FPImage + {$ELSE} Windows, {$IF CompilerVersion >= 23} VCL.ExtCtrls, VCL.Graphics {$ELSE} ExtCtrls, Graphics {$ENDIF} {$ENDIF}; /// saves a bitmap into a file from a handle HWND procedure SaveHandleToBitmap(OutputFileName: string; hWnd: HWND); @@ -40,14 +41,14 @@ procedure LoadRGBVolumeIntoTImage(V:TNNetVolume; Image:TImage); procedure LoadPictureIntoVolume(LocalPicture: TPicture; Vol:TNNetVolume); {$IFDEF Release} inline; {$ENDIF} /// Loads a Bitmat into a Volume -procedure LoadBitmapIntoVolume(LocalBitmap: Graphics.TBitmap; Vol:TNNetVolume); +procedure LoadBitmapIntoVolume(LocalBitmap: TBitmap; Vol:TNNetVolume); {$IFNDEF FPC} procedure LoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume); {$ENDIF} implementation -uses {$IFDEF FPC}LCLIntf,{$ENDIF}Math; +{$IFDEF FPC}uses LCLIntf;{$ENDIF} procedure SaveHandleToBitmap(OutputFileName: string; hWnd: HWND); {$IFDEF FPC} @@ -67,13 +68,13 @@ procedure SaveHandleToBitmap(OutputFileName: string; hWnd: HWND); end; {$ELSE} var - MyBitmap: Graphics.TBitmap; + MyBitmap: TBitmap; MyDC : HDC; pRect : TRect; w,h : integer; begin MyDC := GetDC(hWnd); - MyBitmap := Graphics.TBitmap.Create; + MyBitmap := TBitmap.Create; try GetWindowRect(HWND,pRect); w := pRect.Right - pRect.Left; @@ -132,7 +133,7 @@ procedure LoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume); end; {$ENDIF} -procedure LoadBitmapIntoVolume(LocalBitmap: Graphics.TBitmap; Vol: TNNetVolume); +procedure LoadBitmapIntoVolume(LocalBitmap: TBitmap; Vol: TNNetVolume); var CountX, CountY, MaxX, MaxY: integer; LocalCanvas: TCanvas; From abda938216b8834f4c6074e3555dbbb2728130c9 
Mon Sep 17 00:00:00 2001 From: mikerabat Date: Tue, 17 Oct 2023 15:30:11 +0200 Subject: [PATCH 07/13] New protected methods that are called when starting the fitting, after each epoch and after each step -> the newly introduced method just checks for the callback routines. Fixed warnings --- neural/neuralfit.pas | 37 ++++++++++++++++++++++++++++++------- neural/neuralvolume.pas | 6 ++++-- 2 files changed, 34 insertions(+), 9 deletions(-) diff --git a/neural/neuralfit.pas b/neural/neuralfit.pas index 628cfef4..bab0cd2d 100644 --- a/neural/neuralfit.pas +++ b/neural/neuralfit.pas @@ -93,6 +93,10 @@ TNeuralFitBase = class(TMObject) {$ENDIF} FProcs: TNeuralThreadList; procedure CheckLearningRate(iEpochCount: integer); + protected + procedure DoAfterEpoch; virtual; + procedure DoAfterStep; virtual; + procedure DoOnStart; virtual; public constructor Create(); destructor Destroy(); override; @@ -616,7 +620,7 @@ procedure TNeuralDataLoadingFit.FitLoading(pNN: TNNet; TrainingCnt, begin MessageProc('Computing...'); end; - if Assigned(FOnStart) then FOnStart(Self); + DoOnStart; globalStartTime := Now(); while ( (FMaxEpochs > FCurrentEpoch) and Not(FShouldQuit) ) do begin @@ -670,7 +674,7 @@ procedure TNeuralDataLoadingFit.FitLoading(pNN: TNNet; TrainingCnt, startTime := Now(); end; - if Assigned(FOnAfterStep) then FOnAfterStep(Self); + DoAfterStep; Inc(FCurrentStep); end; @@ -807,7 +811,7 @@ procedure TNeuralDataLoadingFit.FitLoading(pNN: TNNet; TrainingCnt, 'Epochs: '+IntToStr(FCurrentEpoch)+ '. Working time: '+FloatToStrF(Round((Now() - globalStartTime)*2400)/100,ffFixed,4,2)+' hours.'); - if Assigned(FOnAfterEpoch) then FOnAfterEpoch(Self); + DoAfterEpoch; end; if TestBestAtEnd and @@ -880,7 +884,7 @@ function TNeuralFit.FitTrainingPair(Idx: integer; ThreadId: integer): TNNetVolum var ElementIdx: integer; begin - ElementIdx := Random(FTrainingVolumes.Count); + ElementIdx := Random(FTrainingVolumes.Count); // this is not thread save! 
FitTrainingPair := FTrainingVolumes[ElementIdx]; end; @@ -1516,6 +1520,24 @@ destructor TNeuralFitBase.Destroy(); inherited Destroy(); end; +procedure TNeuralFitBase.DoAfterEpoch; +begin + if Assigned(FOnAfterEpoch) then + fOnAfterEpoch(self); +end; + +procedure TNeuralFitBase.DoOnStart; +begin + if Assigned(FOnStart) then + FOnStart(self); +end; + +procedure TNeuralFitBase.DoAfterStep; +begin + if Assigned(FOnAfterStep) then + fOnAfterStep(self); +end; + procedure TNeuralFitBase.WaitUntilFinished; begin FShouldQuit := true; @@ -1771,7 +1793,8 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; begin MessageProc('Computing...'); end; - if Assigned(FOnStart) then FOnStart(Self); + DoOnStart; + globalStartTime := Now(); while ( (FMaxEpochs > FCurrentEpoch) and Not(FShouldQuit) ) do begin @@ -1855,7 +1878,7 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; startTime := Now(); end; - if Assigned(FOnAfterStep) then FOnAfterStep(Self); + DoAfterStep; Inc(FCurrentStep); end; // of epoch {$IFDEF Debug} @@ -1997,7 +2020,7 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; Round( (Now() - globalStartTime) * 24 * 60 * 60),',,,' ); end; - if Assigned(FOnAfterEpoch) then FOnAfterEpoch(Self); + DoAfterEpoch; CloseFile(CSVFile); AssignFile(CSVFile, FileNameCSV); diff --git a/neural/neuralvolume.pas b/neural/neuralvolume.pas index 2d18ba0f..f7ce2819 100644 --- a/neural/neuralvolume.pas +++ b/neural/neuralvolume.pas @@ -203,7 +203,7 @@ TVolume = class(TObject) procedure CopyChannels(Original: TVolume; aChannels: array of integer); procedure Define(Original: array of T); function DotProduct(Original: TVolume): T; overload; {$IFDEF Release} inline; {$ENDIF} - class function DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: integer): Single; overload; {$IFDEF Release} inline; {$ENDIF} + class function DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: integer): Single; overload; class function Product(PtrA: TNeuralFloatArrPtr; NumElements: integer): Single; overload; {$IFDEF Release} inline; 
{$ENDIF} function SumDiff(Original: TVolume): T; {$IFDEF Release} inline; {$ENDIF} procedure DebugDiff(Original: TVolume; Limit: Single = 0); @@ -700,7 +700,7 @@ implementation {$ENDIF} uses {$IFNDEF x64} Neural.AVX {$ELSE} Neural.AVXx64{$ENDIF}, neuralbit, - Math, CPUFeatures; + CPUFeatures; function CreateTokenizedStringList(str: string; c:char):TStringList; begin @@ -2364,8 +2364,10 @@ procedure TNNetDictionary.LoadDictionaryFromFile(Filename: string; var Sep: TStringList; CurrentLine: string; + {$IFDEF DEBUG} Word: string; WordCount: string; + {$ENDIF} FileHandler: TextFile; begin Clear; From 454c25e467f802de0e831bf8ced76bf9782004fe Mon Sep 17 00:00:00 2001 From: mikerabat Date: Mon, 6 Nov 2023 10:16:27 +0100 Subject: [PATCH 08/13] Fix: Range check error (or negative number) on the examples seen output if the number of examples seen exceeds 2e9 (1 shl 31) examples --- neural/neuralfit.pas | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/neural/neuralfit.pas b/neural/neuralfit.pas index bab0cd2d..7fb44a97 100644 --- a/neural/neuralfit.pas +++ b/neural/neuralfit.pas @@ -559,7 +559,7 @@ procedure TNeuralDataLoadingFit.FitLoading(pNN: TNNet; TrainingCnt, begin FMessageProc( 'Epochs: ' + IntToStr(FCurrentEpoch) + - ' Examples seen:' + IntToStr(FCurrentEpoch * TrainingCnt) + + ' Examples seen:' + IntToStr(Int64(FCurrentEpoch) * Int64(TrainingCnt)) + ' Test Accuracy: ' + FloatToStrF(TestRate,ffFixed,6,4) + ' Test Error: ' + FloatToStrF(TestError,ffFixed,6,4) + ' Test Loss: ' + FloatToStrF(TestLoss,ffFixed,6,4) + @@ -662,7 +662,7 @@ procedure TNeuralDataLoadingFit.FitLoading(pNN: TNNet; TrainingCnt, totalTimeSeconds := (Now() - startTime) * 24 * 60 * 60; if FVerbose then MessageProc ( - IntToStr((FGlobalHit + FGlobalMiss)*I + FCurrentEpoch*TrainingCnt) + + IntToStr(Int64((FGlobalHit + FGlobalMiss))*Int64(I) + Int64(FCurrentEpoch)*Int64(TrainingCnt)) + ' Examples seen. 
Accuracy: ' + FloatToStrF(FTrainingAccuracy,ffFixed,6,4) + ' Error: ' + FloatToStrF(TrainingError,ffFixed,10,5) + ' Loss: ' + FloatToStrF(TrainingLoss,ffFixed,7,5) + @@ -722,7 +722,7 @@ procedure TNeuralDataLoadingFit.FitLoading(pNN: TNNet; TrainingCnt, begin FMessageProc( 'Epochs: ' + IntToStr(FCurrentEpoch) + - ' Examples seen:' + IntToStr(FCurrentEpoch * TrainingCnt) + + ' Examples seen:' + IntToStr(Int64(FCurrentEpoch) * Int64(TrainingCnt)) + ' Validation Accuracy: ' + FloatToStrF(ValidationRate,ffFixed,6,4) + ' Validation Error: ' + FloatToStrF(ValidationError,ffFixed,6,4) + ' Validation Loss: ' + FloatToStrF(ValidationLoss,ffFixed,6,4) + @@ -1672,7 +1672,7 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; begin FMessageProc( 'Epochs: ' + IntToStr(FCurrentEpoch) + - ' Examples seen:' + IntToStr(FCurrentEpoch * FImgVolumes.Count) + + ' Examples seen:' + IntToStr(Int64(FCurrentEpoch) * Int64(FImgVolumes.Count)) + ' Test Accuracy: ' + FloatToStrF(TestRate,ffFixed,6,4) + ' Test Error: ' + FloatToStrF(TestError,ffFixed,6,4) + ' Test Loss: ' + FloatToStrF(TestLoss,ffFixed,6,4) + @@ -1866,7 +1866,7 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; totalTimeSeconds := (Now() - startTime) * 24 * 60 * 60; if FVerbose then MessageProc ( - IntToStr((FGlobalHit + FGlobalMiss) * I + FCurrentEpoch*FImgVolumes.Count) + + IntToStr(Int64(FGlobalHit + FGlobalMiss) * Int64(I) + Int64(FCurrentEpoch)*Int64(FImgVolumes.Count)) + ' Examples seen. 
Accuracy: ' + FloatToStrF(FTrainingAccuracy,ffFixed,6,4) + ' Error: ' + FloatToStrF(TrainingError,ffFixed,10,5) + ' Loss: ' + FloatToStrF(TrainingLoss,ffFixed,7,5) + @@ -1943,7 +1943,7 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; begin FMessageProc( 'Epochs: ' + IntToStr(FCurrentEpoch) + - ' Examples seen:' + IntToStr(FCurrentEpoch * FImgVolumes.Count) + + ' Examples seen:' + IntToStr(Int64(FCurrentEpoch) * Int64(FImgVolumes.Count)) + ' Validation Accuracy: ' + FloatToStrF(ValidationRate,ffFixed,6,4) + ' Validation Error: ' + FloatToStrF(ValidationError,ffFixed,6,4) + ' Validation Loss: ' + FloatToStrF(ValidationLoss,ffFixed,6,4) + From 12d54860d442e4c283d38c0615c3677e6612f4d0 Mon Sep 17 00:00:00 2001 From: Michael Rabatscher Date: Thu, 25 Apr 2024 16:47:44 +0200 Subject: [PATCH 09/13] Updated to Jaopaolos reference up until 25.04.2024 -> some changes are still there: * removed unnecessary extra field formatsettings in TVolume -> made a global variable (memory spare ;) ) * fixed many compile warnings and messages. 
D2010 does not report any --- README.md | 104 +- .../CaiOptimizedDenseNet.lpr | 2 +- .../CaiOptimizedDenseNet48.lpr | 2 +- .../kOptimizedDenseNet.lpr | 2 +- examples/Hypotenuse/README.md | 3 + .../IdentityShortcutConnection.pas | 2 +- examples/IdentityShortcutConnection/README.md | 6 +- .../ImageClassifierSELU.lpr | 2 +- .../MalariaImageClassification.pas | 2 +- examples/ResNet/CaiResNet20.lpr | 2 +- examples/ResNet/ResNet20.lpr | 2 +- .../SeparableConvolution.lpr | 2 +- .../SimpleFashionMNIST/SimpleFashionMNIST.lpr | 62 +- examples/SimpleImageClassifier/README.md | 80 +- .../SimpleImageClassifier.lpr | 2 +- .../SimpleImageClassifierGroupedConv.lpr | 18 +- .../SimpleImageClassifierPaddingCropping.lpr | 4 +- ...pleImageClassifierPaddingCroppingSwish.lpr | 4 +- .../SimpleImageClassifierReLU6.lpr | 4 +- .../SimpleImageClassifierResize48.lpr | 4 +- .../SimpleImageClassifierResize64.lpr | 4 +- .../SimpleImageClassifierSharedWeights.lpr | 2 +- .../SimpleImageClassifierSwish.lpr | 4 +- .../SimpleImageClassifierGPU.lpr | 2 +- .../SimpleImageClassifierParallel.pas | 2 +- examples/SimpleMNist/SimpleMNist.lpr | 39 +- .../SimplePlantLeafDisease.ipynb | 7895 +---------------- .../SimplePlantLeafDisease.pas | 2 +- .../SimplePlantLeafDiseaseLoading.pas | 2 +- .../SimplePlantLeafDiseaseLoadingAPI.pas | 2 +- .../SimplePlantLeafDiseaseParallel.pas | 2 +- .../SimpleTinyImageNet/SimpleTinyImageNet.pas | 2 +- examples/SuperResolution/README.md | 4 +- .../uvisualautoencodertinyimagenet.lfm | 2 +- neural/neuraldatasets.pas | 155 +- neural/neuralfit.pas | 317 +- neural/neuralnetwork.pas | 2362 ++++- neural/neuralthread.pas | 7 + neural/neuralvolume.pas | 1326 ++- neural/neuralvolumev.pas | 2 +- 40 files changed, 4428 insertions(+), 8014 deletions(-) diff --git a/README.md b/README.md index 25cdba8a..da667637 100644 --- a/README.md +++ b/README.md @@ -5,9 +5,15 @@ OpenCL capable devices including AMD, Intel and NVIDIA. 
This API has been tested This project is a subproject from a bigger and older project called [CAI](https://sourceforge.net/projects/cai/) and is sister to Keras based [K-CAI NEURAL API](https://github.com/joaopauloschuler/k-neural-api). You can find trained neural network models in the [pre-trained-neural-api-networks](https://github.com/joaopauloschuler/pre-trained-neural-api-networks/) repository. +## Intro Videos +[![Watch the video](https://img.youtube.com/vi/aIy1S7clhQo/0.jpg)](https://youtu.be/aIy1S7clhQo) | [![Watch the video](https://img.youtube.com/vi/q56NcgUiAAk/0.jpg)](https://youtu.be/q56NcgUiAAk) | [![Watch the video](https://img.youtube.com/vi/PdNTgI_qSyo/0.jpg)](https://youtu.be/PdNTgI_qSyo) +--------------------------- | ------------------------------------- | ------------------------- +Basics of Neural Networks in Pascal - Loading and Saving | Neural Networks for Absolute Beginners! Learning a Simple Function | Coding a Neural Network in Pascal that Learns to Calculate the Hypotenuse + ## Why Pascal? -* Compiled pascal code is super fast! This API can outperform some major APIs in some architectures. -* Pascal is easy to learn and easy to make a readable and understandable source code. You'll be able to make super fast **native** code and at the same time have a readable code. +* The Pascal computer language is easy to learn. Pascal allows developers to make a readable and understandable source code. +* You'll be able to make super-fast **native code** and at the same time have a readable code. +* This API can outperform some major APIs in some architectures. ## Prerequisites You'll need [Lazarus](https://www.lazarus-ide.org/) development environment. If you have an OpenCL capable device, you'll need its OpenCL drivers. Many examples use the [CIFAR-10](https://www.cs.toronto.edu/~kriz/cifar.html) dataset. 
You'll also find examples for the [CIFAR-100](https://www.cs.toronto.edu/~kriz/cifar.html), [MNIST](http://yann.lecun.com/exdb/mnist/), [Fashion MNIST](https://www.kaggle.com/zalando-research/fashionmnist) and the [Places365-Standard Small images 256x256](http://places2.csail.mit.edu/download.html) dataset. @@ -17,6 +23,11 @@ This project is [Lazarus](https://www.lazarus-ide.org/) based. That said, as of ## Installation Clone this project, add the [**neural**](https://github.com/joaopauloschuler/neural-api/tree/master/neural) folder to your [Lazarus](https://www.lazarus-ide.org/) unit search path and you'll be ready to go! + +## A.I. Powered Support +You can get A.I. powered help from these tools: +* [CAI Neural API support at ChatGPT4](https://chat.openai.com/g/g-bqMxEDpIg-neural-api-free-pascal-developer). +* [CAI Neural API support at Poe](https://poe.com/CAI-NEURAL-API). ## Documentation The documentation is composed by: @@ -34,16 +45,88 @@ In this readme file, you’ll find information about: * Other scientific publications from the same author. ### Easy Examples First Please! -Some recommended introductory source code examples are: +[![Watch the video](https://img.youtube.com/vi/PdNTgI_qSyo/0.jpg)](https://youtu.be/PdNTgI_qSyo) + +You can click on the image above to watch the video. + +Assuming that you would like to train a neural network to learn a function that has 2 inputs and one output, you could start with something like this: +``` + NN.AddLayer([ + TNNetInput.Create(2), + TNNetFullConnectReLU.Create(32), + TNNetFullConnectReLU.Create(32), + TNNetFullConnectLinear.Create(1) + ]); +``` +The example above has 2 inputs (`TNNetInput`), 2 dense layers (`TNNetFullConnectReLU`) with 32 neurons each and one output (`TNNetFullConnectLinear`). + +You can learn more about how to build and train simple neural networks at the following source code examples: +* [Only one neuron](https://github.com/joaopauloschuler/neural-api/tree/master/examples/OnlyOneNeuron). 
* [Training a neural network to learn the hypotenuse function](https://github.com/joaopauloschuler/neural-api/tree/master/examples/Hypotenuse) * [Training a neural network to learn the hypotenuse function with FitLoading](https://github.com/joaopauloschuler/neural-api/tree/master/examples/HypotenuseFitLoading) * [Training a neural network to learn boolean functions AND, OR and XOR with neuralfit unit](https://github.com/joaopauloschuler/neural-api/tree/master/examples/XorAndOr) * [Training a neural network to learn boolean functions AND, OR and XOR without neuralfit unit](https://sourceforge.net/p/cai/svncode/HEAD/tree/trunk/lazarus/experiments/supersimple/supersimple.lpr) +### Loading and Saving Neural Networks +Loading is very easy: +``` + NN := TNNet.Create; + NN.LoadFromFile('MyTrainedNeuralNetwork.nn'); +``` +Saving is as easy: + +``` + NN.SaveToFile('MyTrainedNeuralNetwork.nn'); +``` + +### NLP - Training a Simple Neural Network Model for Text Generation +This [NLP source code example](https://github.com/joaopauloschuler/neural-api/tree/master/examples/SimpleNLP) shows a (hello world) small neural network trained on the [Tiny Stories dataset](https://huggingface.co/datasets/roneneldan/TinyStories). This code + +``` + WriteLn(GenerateStringFromChars(NFit.NN, 'once', FSampler),'.'); + WriteLn(GenerateStringFromChars(NFit.NN, 'one ', FSampler),'.'); +``` + +produces this output: +``` +once upon a time, there was a little girl named lily. she loved to play outside i. +one day, a little girl named lily was playing in her garden. she saw a big car wi. 
+``` + +You can open on colab the raw training file and run it by yourself at: +https://colab.research.google.com/github/joaopauloschuler/neural-api/blob/master/examples/SimpleNLP/NLP_CAI_TinyStories_Simple_Example.ipynb + +#### Creating Your Own Chat Bot +Once your neural network is trained, you can run your own chat bot with: +``` +var + S: string; + oSampler: TNNetSamplerBase; + NN: TNNet; +begin + oSampler := TNNetSamplerTopP.Create(0.6); + NN := TNNet.Create(); + WriteLn('Loading neural network.'); + NN.LoadFromFile('MyNeuralNetwork.nn'); + NN.DebugStructure(); + WriteLn(); + WriteLn('Write something and I will reply.'); + repeat + Write('User: '); + ReadLn(S); + WriteLn('Neural network: ',GenerateStringFromChars(NN, LowerCase(S), oSampler),'.'); + until S = 'exit'; + NN.Free; + oSampler.Free; +end; +``` + ### Simple Image Classification Examples -#### How Does the Code Look like for an Image Classification (CIFAR-10) Example? -This is an example for image classification: +#### CIFAR-10 Image Classification Example +The CIFAR-10 dataset is a well-known collection of images commonly used to train machine learning and computer vision algorithms. It was created by the Canadian Institute for Advanced Research (CIFAR). It contains 60K 32x32 color images. The images are classified into 10 different classes, with 6,000 images per class. The classes represent airplanes, cars, birds, cats, deer, dogs, frogs, horses, ships, and trucks. Despite its relatively low resolution and small size, CIFAR-10 can be challenging for models to achieve high accuracy, making it a good dataset for testing advancements in machine learning techniques. 
+ +Follows a source code example for the CIFAR-10 image classification: ``` NN := TNNet.Create(); NN.AddLayer([ @@ -101,10 +184,13 @@ You can save and load trained models (neural networks) with `TNNet.SaveToFile` a ``` ### Youtube Videos -There are some available videos: -* [Increasing Image Resolution with Neural Networks](https://www.youtube.com/watch?v=jdFixaZ2P4w) -* [Ultra Fast Single Precision Floating Point Computing](https://www.youtube.com/watch?v=qGnfwpKUTIQ) -* [AVX and AVX2 Code Optimization](https://www.youtube.com/watch?v=Pnv174V_emw) +[![Watch the video](https://img.youtube.com/vi/aIy1S7clhQo/0.jpg)](https://youtu.be/aIy1S7clhQo) | [![Watch the video](https://img.youtube.com/vi/q56NcgUiAAk/0.jpg)](https://youtu.be/q56NcgUiAAk) | [![Watch the video](https://img.youtube.com/vi/PdNTgI_qSyo/0.jpg)](https://youtu.be/PdNTgI_qSyo) +--------------------------- | ------------------------------------- | ------------------------- +Basics of Neural Networks in Pascal - Loading and Saving | Neural Networks for Absolute Beginners! 
Learning a Simple Function | Coding a Neural Network in Pascal that Learns to Calculate the Hypotenuse +[![Watch the video](https://img.youtube.com/vi/tODsv6Ks2DM/0.jpg)](https://youtu.be/tODsv6Ks2DM) | [![Watch the video](https://img.youtube.com/vi/f4T9IB-He_k/0.jpg)](https://youtu.be/f4T9IB-He_k) | [![Watch the video](https://img.youtube.com/vi/o-8NuoSsdck/0.jpg)](https://youtu.be/o-8NuoSsdck) +Pre-trained Neural Networks & Transfer Learning with Pascal's CAI Neural API | Coding a Neural Network in Pascal that Learns the OR Boolean Operation | A Dive into Identity Shortcut Connection - The ResNet building block +[![Watch the video](https://img.youtube.com/vi/SEvWB7k8uy0/0.jpg)](https://youtu.be/SEvWB7k8uy0) | [![Watch the video](https://img.youtube.com/vi/3QwIaAsDmJw/0.jpg)](https://youtu.be/3QwIaAsDmJw) | [![Watch the video](https://img.youtube.com/vi/VH6v3D5cxxs/0.jpg)](https://youtu.be/VH6v3D5cxxs) +Increasing Image Resolution with Neural Networks | Ultra Fast Single Precision Floating Point Computing | AVX and AVX2 Code Optimization Some videos make referrence to **uvolume** unit. The current **neuralvolume** unit used to be called **uvolume**. This is why it's mentioned. diff --git a/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet.lpr b/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet.lpr index 4670e225..bf72972e 100644 --- a/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet.lpr +++ b/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet.lpr @@ -133,7 +133,7 @@ TTestCNNAlgo = class(TCustomApplication) NN.AddLayer( TNNetDropout.Create(0.5) ); NN.AddLayer( TNNetMaxChannel.Create() ); NN.AddLayer( TNNetFullConnectLinear.Create(NumClasses) ); - NN.AddLayer( TNNetSoftMax.Create() ); + NN.AddLayer( TNNetSoftMax.Create({SkipBackpropDerivative=}1) ); NN.Layers[ NN.GetFirstImageNeuronalLayerIdx() ].InitBasicPatterns(); (* // First block shouldn't be separable. 
diff --git a/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet48.lpr b/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet48.lpr index a5c2aed5..a81b20d3 100644 --- a/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet48.lpr +++ b/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet48.lpr @@ -124,7 +124,7 @@ TTestCNNAlgo = class(TCustomApplication) NN.AddLayer( TNNetDropout.Create(0.25) ); NN.AddLayer( TNNetMaxChannel.Create() ); NN.AddLayer( TNNetFullConnectLinear.Create(NumClasses) ); - NN.AddLayer( TNNetSoftMax.Create() ); + NN.AddLayer( TNNetSoftMax.Create({SkipBackpropDerivative=}1) ); NN.Layers[ NN.GetFirstImageNeuronalLayerIdx() ].InitBasicPatterns(); WriteLn('Learning rate set to: [',fLearningRate:7:5,']'); diff --git a/examples/CaiOptimizedDenseNet/kOptimizedDenseNet.lpr b/examples/CaiOptimizedDenseNet/kOptimizedDenseNet.lpr index ef2d93af..c8aa1f2e 100644 --- a/examples/CaiOptimizedDenseNet/kOptimizedDenseNet.lpr +++ b/examples/CaiOptimizedDenseNet/kOptimizedDenseNet.lpr @@ -144,7 +144,7 @@ TTestCNNAlgo = class(TCustomApplication) NN.AddLayer( TNNetMaxChannel.Create() ); NN.AddLayer( TNNetReLU6.Create() ); NN.AddLayer( TNNetFullConnectLinear.Create(NumClasses) ); - NN.AddLayer( TNNetSoftMax.Create() ); + NN.AddLayer( TNNetSoftMax.Create({SkipBackpropDerivative=}1) ); NN.Layers[ NN.GetFirstImageNeuronalLayerIdx() ].InitBasicPatterns(); CreateCifar10Volumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes); diff --git a/examples/Hypotenuse/README.md b/examples/Hypotenuse/README.md index 60575f86..4d2d168c 100644 --- a/examples/Hypotenuse/README.md +++ b/examples/Hypotenuse/README.md @@ -1,5 +1,8 @@ # Learning Hypotenuse Function +## 2 Minutes Intro Video +[![Watch the video](https://img.youtube.com/vi/PdNTgI_qSyo/0.jpg)](https://youtu.be/PdNTgI_qSyo) + This example has these main steps: * Preparing training data * Creating the neural network diff --git a/examples/IdentityShortcutConnection/IdentityShortcutConnection.pas 
b/examples/IdentityShortcutConnection/IdentityShortcutConnection.pas index 1e4d8115..0e1e4416 100644 --- a/examples/IdentityShortcutConnection/IdentityShortcutConnection.pas +++ b/examples/IdentityShortcutConnection/IdentityShortcutConnection.pas @@ -51,7 +51,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(4), TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugWeights(); NN.DebugStructure(); diff --git a/examples/IdentityShortcutConnection/README.md b/examples/IdentityShortcutConnection/README.md index d4042b04..3fc5b272 100644 --- a/examples/IdentityShortcutConnection/README.md +++ b/examples/IdentityShortcutConnection/README.md @@ -1,10 +1,10 @@ # Identity Shortcut Connection -The **identity shortcut connection** is a connection that skips few layers and then is summed to the output of a following +The **identity shortcut connection** is a connection that skips few layers (usually 2 layers) and then is summed with the output of a following layer. You can find more about it in the paper [Deep Residual Learning for Image Recognition](https://arxiv.org/abs/1512.03385) and [here](https://towardsdatascience.com/an-overview-of-resnet-and-its-variants-5281e2f56035). -The main point of attention is the **summation** of outputs. In CAI, this is done via the `TNNetSum` class. This class gets an array -of layers as an input and sums all inputs. For this summation to work, the shape of each input must be the same otherwise you'll +The main point of attention is the **summation** of outputs. In CAI, this is done via the `TNNetSum` class. `TNNetSum` sums +an array of input layers. For this summation to work, the shape of each input must be the same otherwise you'll get a run time error. 
The current example shows this: ``` GlueLayer := NN.AddLayer(TNNetReLU.Create()); diff --git a/examples/ImageClassifierSELU/ImageClassifierSELU.lpr b/examples/ImageClassifierSELU/ImageClassifierSELU.lpr index 8ccfec25..96dfb61b 100644 --- a/examples/ImageClassifierSELU/ImageClassifierSELU.lpr +++ b/examples/ImageClassifierSELU/ImageClassifierSELU.lpr @@ -45,7 +45,7 @@ TTestCNNAlgo = class(TCustomApplication) NN.AddLayer( TNNetMaxPool.Create(2) ); NN.AddLayer( TNNetSELU.Create() ); NN.AddLayer( TNNetFullConnectLinear.Create(10) ); - NN.AddLayer( TNNetSoftMax.Create() ); + NN.AddLayer( TNNetSoftMax.Create({SkipBackpropDerivative=}1) ); CreateCifar10Volumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes); diff --git a/examples/MalariaImageClassification/MalariaImageClassification.pas b/examples/MalariaImageClassification/MalariaImageClassification.pas index b44f46d5..34d43e27 100644 --- a/examples/MalariaImageClassification/MalariaImageClassification.pas +++ b/examples/MalariaImageClassification/MalariaImageClassification.pas @@ -46,7 +46,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(2), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); // change ProportionToLoad to a smaller number if you don't have available 4GB of RAM. 
diff --git a/examples/ResNet/CaiResNet20.lpr b/examples/ResNet/CaiResNet20.lpr index 7f9313b0..4d0db767 100644 --- a/examples/ResNet/CaiResNet20.lpr +++ b/examples/ResNet/CaiResNet20.lpr @@ -109,7 +109,7 @@ procedure CaiOptimizedResnetUnit(pNN: TNNet; pNeurons: integer); NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0.0; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}32, {epochs=}50); NeuralFit.Free; ReadLn(); diff --git a/examples/ResNet/ResNet20.lpr b/examples/ResNet/ResNet20.lpr index 32eafc88..2e898ec0 100644 --- a/examples/ResNet/ResNet20.lpr +++ b/examples/ResNet/ResNet20.lpr @@ -80,7 +80,7 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0.0; //NeuralFit.MaxThreadNum := 1; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}32, {epochs=}50); NeuralFit.Free; diff --git a/examples/SeparableConvolution/SeparableConvolution.lpr b/examples/SeparableConvolution/SeparableConvolution.lpr index 9cf800fb..0e49421f 100644 --- a/examples/SeparableConvolution/SeparableConvolution.lpr +++ b/examples/SeparableConvolution/SeparableConvolution.lpr @@ -42,7 +42,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugWeights(); NN.DebugStructure(); diff --git a/examples/SimpleFashionMNIST/SimpleFashionMNIST.lpr b/examples/SimpleFashionMNIST/SimpleFashionMNIST.lpr index d82aeaf4..21112f2b 100644 --- a/examples/SimpleFashionMNIST/SimpleFashionMNIST.lpr +++ b/examples/SimpleFashionMNIST/SimpleFashionMNIST.lpr @@ -2,6 +2,12 @@ (* Coded by Joao Paulo Schwarz Schuler. 
https://github.com/joaopauloschuler/neural-api + ----------------------------------------------- + The code shows an example of training and fitting a convolutional + neural network (CNN) using the Fashion MNIST dataset. It creates a neural + network with specific layers and configurations, loads the fashion MNIST data, + and then trains the network using the provided data. The code also sets + various parameters for training, such as learning rate, decay, and batch size. *) {$mode objfpc}{$H+} @@ -15,34 +21,42 @@ TTestCNNAlgo = class(TCustomApplication) procedure DoRun; override; end; + // Implementation of the TTestCNNAlgo class procedure TTestCNNAlgo.DoRun; var - NN: TNNet; - NeuralFit: TNeuralImageFit; - ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes: TNNetVolumeList; + NN: TNNet; // Neural network object + NeuralFit: TNeuralImageFit; // Object for training and fitting the neural network + ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes: TNNetVolumeList; // Lists of training, validation, and test image volumes begin + // Checking if the MNIST files exist and loading the data if Not(CheckMNISTFile('train', {IsFashion=}true)) or Not(CheckMNISTFile('t10k', {IsFashion=}true)) then begin Terminate; exit; end; + WriteLn('Creating Neural Network...'); - NN := THistoricalNets.Create(); + + // Creating the neural network with specific layers and configurations + NN := TNNet.Create(); NN.AddLayer([ - TNNetInput.Create(28, 28, 1), - TNNetConvolutionLinear.Create(64, 5, 2, 1, 1), - TNNetMaxPool.Create(4), - TNNetConvolutionReLU.Create(64, 3, 1, 1, 1), - TNNetConvolutionReLU.Create(64, 3, 1, 1, 1), - TNNetFullConnectReLU.Create(32), - TNNetFullConnectReLU.Create(32), - TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetInput.Create(28, 28, 1), // Input layer for 28x28 grayscale images + TNNetConvolutionLinear.Create(64, 5, 2, 1, 1), // Convolutional layer with linear activation + TNNetMaxPool.Create(4), // Max pooling layer + 
TNNetConvolutionReLU.Create(64, 3, 1, 1, 1), // Convolutional layer with ReLU activation + TNNetConvolutionReLU.Create(64, 3, 1, 1, 1), // Convolutional layer with ReLU activation + TNNetFullConnectReLU.Create(32), // Fully connected layer with ReLU activation + TNNetFullConnectReLU.Create(32), // Fully connected layer with ReLU activation + TNNetFullConnectLinear.Create(10), // Fully connected layer with linear activation + TNNetSoftMax.Create({SkipBackpropDerivative=}1) // Softmax layer for classification ]); + + // Creating the training, validation, and test image volumes from the fashion MNIST files CreateMNISTVolumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, 'train', 't10k', {Verbose=}true, {IsFashion=}true); + // Creating and configuring the NeuralFit object for training the neural network NeuralFit := TNeuralImageFit.Create; NeuralFit.FileNameBase := 'SimpleFashionMNIST'; NeuralFit.InitialLearningRate := 0.001; @@ -53,21 +67,25 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.HasFlipX := true; NeuralFit.HasFlipY := false; NeuralFit.MaxCropSize := 4; + + // Training and fitting the neural network using the provided data NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}128, {epochs=}50); - NeuralFit.Free; - NN.Free; - ImgTestVolumes.Free; - ImgValidationVolumes.Free; - ImgTrainingVolumes.Free; + NeuralFit.Free; // Freeing the NeuralFit object + + NN.Free; // Freeing the neural network object + ImgTestVolumes.Free; // Freeing the test data volumes + ImgValidationVolumes.Free; // Freeing the validation data volumes + ImgTrainingVolumes.Free; // Freeing the training data volumes Terminate; end; var Application: TTestCNNAlgo; begin - Application := TTestCNNAlgo.Create(nil); - Application.Title:='Simple Fashion MNIST Classification Example'; - Application.Run; - Application.Free; + Application := TTestCNNAlgo.Create(nil); // Creating an instance of the TTestCNNAlgo class + 
Application.Title:='Simple Fashion MNIST Classification Example'; // Setting the application title + Application.Run; // Running the application + Application.Free; // Freeing the application instance end. + diff --git a/examples/SimpleImageClassifier/README.md b/examples/SimpleImageClassifier/README.md index 0bce7044..69e4895e 100644 --- a/examples/SimpleImageClassifier/README.md +++ b/examples/SimpleImageClassifier/README.md @@ -29,22 +29,80 @@ Later on, this is how the training/fitting is called: NeuralFit := TNeuralImageFit.Create; NeuralFit.FileNameBase := 'SimpleImageClassifier'; NeuralFit.InitialLearningRate := 0.001; - NeuralFit.LearningRateDecay := 0.005; - NeuralFit.StaircaseEpochs := 17; + NeuralFit.LearningRateDecay := 0.01; + NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; - NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}128, {epochs=}100); + NeuralFit.L2Decay := 0; + NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); ``` -There is a trick that you can do with this API or any other API when working with image classification: **you can increase the input image size**. +## Beyond ReLU Activation Function -As per the following example, by increasing CIFAR-10 input image sizes from 32x32 to 48x48, you can gain up to 2% in classification accuracy. +The paper [Searching for Activation Functions](https://arxiv.org/abs/1710.05941) describes the search for a better activation function than **ReLU**. In their work, the authors found **Swish** to be the best replacement for **ReLU**. The downside of Swish is: it requires a lot of computation to calculate it. Later, the paper [Searching for MobileNetV3](https://arxiv.org/pdf/1905.02244v5.pdf) introduces the **Hard Swish** activation function. The **Hard Swish** gives similar results to **Swish** with a lot less computation. 
-You can change image sizes with: +The same neural network shown above could be implemented with **Swish** as ``` -ImgTrainingVolumes.ResizeImage(48, 48); -ImgValidationVolumes.ResizeImage(48, 48); -ImgTestVolumes.ResizeImage(48, 48); + NN.AddLayer([ + TNNetInput.Create(32, 32, 3), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}5, {Padding=}2, {Stride=}1, {SuppressBias=}1), + TNNetMaxPool.Create(4), + TNNetMovingStdNormalization.Create(), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetSwish.Create(), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetSwish.Create(), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetSwish.Create(), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetSwish.Create(), + TNNetDropout.Create(0.5), + TNNetMaxPool.Create(2), + TNNetFullConnectLinear.Create(10), + TNNetSoftMax.Create() + ]); +``` + +or as +``` + NN.AddLayer([ + TNNetInput.Create(32, 32, 3), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}5, {Padding=}2, {Stride=}1, {SuppressBias=}1), + TNNetMaxPool.Create(4), + TNNetMovingStdNormalization.Create(), + TNNetConvolutionSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetConvolutionSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetConvolutionSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetConvolutionSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetDropout.Create(0.5), + TNNetMaxPool.Create(2), + TNNetFullConnectLinear.Create(10), + TNNetSoftMax.Create() + ]); +``` + +The Hard Swish variant is implemented with: +``` + NN.AddLayer([ + TNNetInput.Create(32, 32, 3), + 
TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}5, {Padding=}2, {Stride=}1, {SuppressBias=}1), + TNNetMaxPool.Create(4), + TNNetMovingStdNormalization.Create(), + TNNetConvolutionHardSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetConvolutionHardSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetConvolutionHardSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetConvolutionHardSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetDropout.Create(0.5), + TNNetMaxPool.Create(2), + TNNetFullConnectLinear.Create(10), + TNNetSoftMax.Create() + ]); ``` -You can find an implementation with this trick at the [SimpleImageClassifierResize48.lpr](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleImageClassifier/SimpleImageClassifierResize48.lpr) file. There is also another implementation resizing to CIFAR-10 to 64x64 pixels but the gain won't be too big. 
+ +These are the CIFAR-10 classification accuracies with ReLU, Swish and HardSwish activation functions: + +Activation Function (source) | Test Classification Accuracy (%) +---------------------------- | ------------------------------------- +[ReLU](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleImageClassifier/SimpleImageClassifier.lpr) | [85.53%](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleImageClassifier/results/SimpleImageClassifier20221206.csv) +[Swish](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleImageClassifier/SimpleImageClassifierSwish.lpr) | [86.55%](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleImageClassifier/results/SimpleImageClassifierSwish20221207.csv) +[Hard Swish](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleImageClassifier/SimpleImageClassifierHardSwish.lpr) | [86.82%](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleImageClassifier/results/SimpleImageClassifierHardSwish20221208.csv) diff --git a/examples/SimpleImageClassifier/SimpleImageClassifier.lpr b/examples/SimpleImageClassifier/SimpleImageClassifier.lpr index f823b7d8..47fc1e16 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifier.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifier.lpr @@ -41,7 +41,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); CreateCifar10Volumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes); diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierGroupedConv.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierGroupedConv.lpr index 98e6e9ad..8ede0d4c 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifierGroupedConv.lpr +++ 
b/examples/SimpleImageClassifier/SimpleImageClassifierGroupedConv.lpr @@ -34,30 +34,30 @@ TTestCNNAlgo = class(TCustomApplication) TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}5, {Padding=}2, {Stride=}1, {SuppressBias=}1), TNNetMaxPool.Create(4) ]); - NN.AddGroupedConvolution(TNNetConvolutionReLU, - {Groups=}8, {Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1); - NN.AddGroupedConvolution(TNNetConvolutionReLU, + NN.AddGroupedConvolution(TNNetConvolutionHardSwish, {Groups=}4, {Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1); - NN.AddGroupedConvolution(TNNetConvolutionReLU, - {Groups=}8, {Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1); - NN.AddGroupedConvolution(TNNetConvolutionReLU, + NN.AddGroupedConvolution(TNNetConvolutionHardSwish, + {Groups=}4, {Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1); + NN.AddGroupedConvolution(TNNetConvolutionHardSwish, + {Groups=}4, {Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1); + NN.AddGroupedConvolution(TNNetConvolutionHardSwish, {Groups=}4, {Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1); NN.AddLayer([ TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); CreateCifar10Volumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes); NeuralFit := TNeuralImageFit.Create; - NeuralFit.FileNameBase := 'SimpleImageClassifier-'+IntToStr(GetProcessId()); + NeuralFit.FileNameBase := 'SimpleImageClassifierGroupedConv-'+IntToStr(GetProcessId()); NeuralFit.InitialLearningRate := 0.001; NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0; //NeuralFit.MaxThreadNum := 1; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, 
{batchsize=}64, {epochs=}50); NeuralFit.Free; diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCropping.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCropping.lpr index 2a51a64d..7c664ad6 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCropping.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCropping.lpr @@ -46,7 +46,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); CreateCifar10Volumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes); @@ -79,7 +79,7 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); NeuralFit.Free; diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCroppingSwish.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCroppingSwish.lpr index 554350f2..accae993 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCroppingSwish.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCroppingSwish.lpr @@ -50,7 +50,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); CreateCifar10Volumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes); @@ -83,7 +83,7 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0; NeuralFit.Fit(NN, 
ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); NeuralFit.Free; diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierReLU6.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierReLU6.lpr index d809b8af..749420b7 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifierReLU6.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifierReLU6.lpr @@ -45,7 +45,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); CreateCifar10Volumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes); @@ -56,7 +56,7 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); NeuralFit.Free; diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierResize48.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierResize48.lpr index e6bd6809..1cb0c4a8 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifierResize48.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifierResize48.lpr @@ -40,7 +40,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); CreateCifar10Volumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes); @@ -54,7 +54,7 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0; NeuralFit.MaxCropSize := 12; 
//NeuralFit.MaxThreadNum := 8; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierResize64.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierResize64.lpr index e0ef256e..ebd7a1ae 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifierResize64.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifierResize64.lpr @@ -40,7 +40,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); CreateCifar10Volumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes); @@ -54,7 +54,7 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0; NeuralFit.MaxCropSize := 16; //NeuralFit.MaxThreadNum := 8; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierSharedWeights.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierSharedWeights.lpr index 42c37a91..ab10f133 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifierSharedWeights.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifierSharedWeights.lpr @@ -56,7 +56,7 @@ TTestCNNAlgo = class(TCustomApplication) NN.AddLayer([ TNNetDeepConcat.Create([FirstBranch, SecondBranch]), TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierSwish.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierSwish.lpr index bd72f63e..e7f3dd25 100644 --- 
a/examples/SimpleImageClassifier/SimpleImageClassifierSwish.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifierSwish.lpr @@ -45,7 +45,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); CreateCifar10Volumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes); @@ -56,7 +56,7 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); NeuralFit.Free; diff --git a/examples/SimpleImageClassifierGPU/SimpleImageClassifierGPU.lpr b/examples/SimpleImageClassifierGPU/SimpleImageClassifierGPU.lpr index e17f67b5..e3b7ac3c 100644 --- a/examples/SimpleImageClassifierGPU/SimpleImageClassifierGPU.lpr +++ b/examples/SimpleImageClassifierGPU/SimpleImageClassifierGPU.lpr @@ -57,7 +57,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); CreateCifar10Volumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes); diff --git a/examples/SimpleImageClassifierParallel/SimpleImageClassifierParallel.pas b/examples/SimpleImageClassifierParallel/SimpleImageClassifierParallel.pas index c058e92d..71c0b423 100644 --- a/examples/SimpleImageClassifierParallel/SimpleImageClassifierParallel.pas +++ b/examples/SimpleImageClassifierParallel/SimpleImageClassifierParallel.pas @@ -81,7 +81,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); 
NN.DebugStructure(); CreateCifar10Volumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes); diff --git a/examples/SimpleMNist/SimpleMNist.lpr b/examples/SimpleMNist/SimpleMNist.lpr index 1ea56a11..6ae17b35 100644 --- a/examples/SimpleMNist/SimpleMNist.lpr +++ b/examples/SimpleMNist/SimpleMNist.lpr @@ -7,7 +7,8 @@ uses {$IFDEF UNIX} {$IFDEF UseCThreads} cthreads, {$ENDIF} {$ENDIF} - Classes, SysUtils, CustApp, neuralnetwork, neuralvolume, Math, neuraldatasets, neuralfit; + Classes, SysUtils, CustApp, neuralnetwork, neuralvolume, Math, + neuraldatasets, neuralfit; type TTestCNNAlgo = class(TCustomApplication) @@ -17,17 +18,21 @@ TTestCNNAlgo = class(TCustomApplication) procedure TTestCNNAlgo.DoRun; var - NN: THistoricalNets; - NeuralFit: TNeuralImageFit; - ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes: TNNetVolumeList; + NN: TNNet; // Neural network object + NeuralFit: TNeuralImageFit; // Object for neural network fitting + ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes: TNNetVolumeList; // Volumes for training, validation, and testing begin - if Not(CheckMNISTFile('train')) or Not(CheckMNISTFile('t10k')) then + // Check if MNIST files exist + if not (CheckMNISTFile('train')) or not (CheckMNISTFile('t10k')) then begin Terminate; - exit; + Exit; // Exit the procedure if MNIST files are not found end; + WriteLn('Creating Neural Network...'); - NN := THistoricalNets.Create(); + NN := TNNet.Create(); // Create an instance of the neural network + + // Define the layers of the neural network NN.AddLayer([ TNNetInput.Create(28, 28, 1), TNNetConvolutionLinear.Create(32, 5, 2, 1, 1), @@ -39,10 +44,13 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.2), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); + + // Create MNIST volumes for training, validation, and testing CreateMNISTVolumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, 
'train', 't10k'); + // Configure the neural network fitting NeuralFit := TNeuralImageFit.Create; NeuralFit.FileNameBase := 'SimpleMNist'; NeuralFit.InitialLearningRate := 0.001; @@ -50,12 +58,15 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; NeuralFit.L2Decay := 0.00001; - NeuralFit.HasFlipX := false; - NeuralFit.HasFlipY := false; + NeuralFit.HasFlipX := False; + NeuralFit.HasFlipY := False; NeuralFit.MaxCropSize := 4; + + // Fit the neural network using the training, validation, and testing volumes NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}128, {epochs=}20); NeuralFit.Free; + // Clean up resources NN.Free; ImgTestVolumes.Free; ImgValidationVolumes.Free; @@ -66,8 +77,8 @@ TTestCNNAlgo = class(TCustomApplication) var Application: TTestCNNAlgo; begin - Application := TTestCNNAlgo.Create(nil); - Application.Title:='MNist Classification Example'; - Application.Run; - Application.Free; + Application := TTestCNNAlgo.Create(nil); // Create an instance of TTestCNNAlgo + Application.Title := 'MNist Classification Example'; // Set the application title + Application.Run; // Run the application + Application.Free; // Free the application instance end. 
diff --git a/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.ipynb b/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.ipynb index 9339281f..de2ff831 100644 --- a/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.ipynb +++ b/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.ipynb @@ -1,7494 +1,409 @@ { - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# This is a simple plant leaf disease classifier inspired from Data from: \n", - "# Identification of Plant Leaf Diseases Using a 9-layer Deep Convolutional Neural Network\n", - "# https://data.mendeley.com/datasets/tywbtsjrjv/1\n", - "\n", - "# This source code required the CAI Neural API found at:\n", - "# https://github.com/joaopauloschuler/neural-api\n", - "\n", - "# To be able to run this code, you'll need at least 32GB of RAM.\n", - "\n", - "has_plant_leaf_disease = True\n", - "has_tiny_imagenet_200 = False" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Reading package lists... Done\n", - "Building dependency tree \n", - "Reading state information... 
Done\n", - "unzip is already the newest version (6.0-21ubuntu1).\n", - "git is already the newest version (1:2.17.1-1ubuntu0.5).\n", - "The following additional packages will be installed:\n", - " adwaita-icon-theme autoconf automake autopoint autotools-dev bsdmainutils\n", - " debhelper dh-autoreconf dh-strip-nondeterminism file fontconfig\n", - " fontconfig-config fonts-dejavu-core fp-compiler-3.0.4 fp-docs-3.0.4\n", - " fp-ide-3.0.4 fp-units-base-3.0.4 fp-units-db-3.0.4 fp-units-fcl-3.0.4\n", - " fp-units-fv-3.0.4 fp-units-gfx-3.0.4 fp-units-gtk2-3.0.4 fp-units-math-3.0.4\n", - " fp-units-misc-3.0.4 fp-units-multimedia-3.0.4 fp-units-net-3.0.4\n", - " fp-units-rtl-3.0.4 fp-utils-3.0.4 fpc-3.0.4 fpc-source-3.0.4 gdb gdbserver\n", - " gettext gettext-base gir1.2-atk-1.0 gir1.2-freedesktop gir1.2-gdkpixbuf-2.0\n", - " gir1.2-gtk-2.0 gir1.2-harfbuzz-0.0 gir1.2-pango-1.0 groff-base\n", - " gtk-update-icon-cache hicolor-icon-theme humanity-icon-theme icu-devtools\n", - " intltool-debian lazarus-1.8 lazarus-doc-1.8 lazarus-ide lazarus-ide-1.8\n", - " lazarus-ide-gtk2-1.8 lazarus-src-1.8 lcl-1.8 lcl-gtk2-1.8 lcl-nogui-1.8\n", - " lcl-units-1.8 lcl-utils-1.8 liba52-0.7.4 liba52-0.7.4-dev libapr1\n", - " libaprutil1 libarchive-cpio-perl libarchive-zip-perl libasound2\n", - " libasound2-data libasound2-dev libasyncns0 libatk1.0-0 libatk1.0-data\n", - " libatk1.0-dev libavahi-client3 libavahi-common-data libavahi-common3\n", - " libbabeltrace1 libc6-dbg libcaca-dev libcaca0 libcairo-gobject2\n", - " libcairo-script-interpreter2 libcairo2 libcairo2-dev libcdt5 libcgraph6\n", - " libcroco3 libcups2 libdatrie1 libdca-dev libdca0 libdrm-amdgpu1\n", - " libdrm-common libdrm-dev libdrm-intel1 libdrm-nouveau2 libdrm-radeon1\n", - " libdrm2 libdts-dev libdw1 libegl-mesa0 libegl1 libelf1\n", - " libfile-stripnondeterminism-perl libflac-dev libflac8 libfluidsynth1\n", - " libfontconfig1 libfontconfig1-dev libforms-dev libforms2 libgail-common\n", - " libgail18 libgbm1 libgd-dev 
libgd3 libgdk-pixbuf2.0-0 libgdk-pixbuf2.0-bin\n", - " libgdk-pixbuf2.0-common libgdk-pixbuf2.0-dev libgl1 libgl1-mesa-dev\n", - " libgl1-mesa-dri libglapi-mesa libgles1 libgles2 libglib2.0-0 libglib2.0-bin\n", - " libglib2.0-data libglib2.0-dev libglib2.0-dev-bin libglu1-mesa\n", - " libglu1-mesa-dev libglvnd-core-dev libglvnd-dev libglvnd0 libglx-mesa0\n", - " libglx0 libgmp-dev libgmpxx4ldbl libgraphite2-3 libgraphite2-dev\n", - " libgraphviz-dev libgtk2.0-0 libgtk2.0-bin libgtk2.0-common libgtk2.0-dev\n", - " libgts-0.7-5 libgts-bin libgvc6 libgvc6-plugins-gtk libgvpr2 libharfbuzz-dev\n", - " libharfbuzz-gobject0 libharfbuzz-icu0 libharfbuzz0b libice-dev libice6\n", - " libicu-dev libicu-le-hb-dev libicu-le-hb0 libicu60 libiculx60 libidn11\n", - " libjack-jackd2-0 libjbig-dev libjbig0 liblab-gamut1 libllvm9 libltdl-dev\n", - " libltdl7 liblzma-dev liblzo2-2 libmad0 libmad0-dev libmagic-mgc libmagic1\n", - " libmail-sendmail-perl libmikmod-config libmikmod-dev libmikmod3\n", - " libmodplug-dev libmodplug1 libogg-dev libogg0 libopenal-data libopenal1\n", - " libopengl0 libpango-1.0-0 libpango1.0-dev libpangocairo-1.0-0\n", - " libpangoft2-1.0-0 libpangoxft-1.0-0 libpathplan4 libpciaccess0 libpcre16-3\n", - " libpcre3-dev libpcre32-3 libpcrecpp0v5 libpipeline1 libpixman-1-0\n", - " libpixman-1-dev libproxy-tools libproxy1v5 libpthread-stubs0-dev\n", - " libpulse-dev libpulse-mainloop-glib0 libpulse0 libpython-stdlib\n", - " libpython2.7-minimal libpython2.7-stdlib librsvg2-2 librsvg2-common\n", - " libsamplerate0 libsdl-mixer1.2 libsdl-mixer1.2-dev libsdl1.2-dev\n", - " libsdl1.2debian libsdl2-2.0-0 libsensors4 libserf-1-1 libsigsegv2 libslang2\n", - " libslang2-dev libsm-dev libsm6 libsndfile1 libsndio6.1 libsvn1\n", - " libsys-hostname-long-perl libthai-data libthai0 libtiff-dev libtiff5\n", - " libtiff5-dev libtiffxx5 libtimedate-perl libtool libvlc-bin libvlc-dev\n", - " libvlc5 libvlccore9 libvorbis-dev libvorbis0a libvorbisenc2 libvorbisfile3\n", - " 
libvpx-dev libvpx5 libwayland-client0 libwayland-cursor0 libwayland-egl1\n", - " libwayland-egl1-mesa libwayland-server0 libwebp6 libx11-6 libx11-data\n", - " libx11-dev libx11-doc libx11-xcb-dev libx11-xcb1 libxau-dev libxau6\n", - " libxcb-dri2-0 libxcb-dri2-0-dev libxcb-dri3-0 libxcb-dri3-dev libxcb-glx0\n", - " libxcb-glx0-dev libxcb-present-dev libxcb-present0 libxcb-randr0\n", - " libxcb-randr0-dev libxcb-render0 libxcb-render0-dev libxcb-shape0\n", - " libxcb-shape0-dev libxcb-shm0 libxcb-shm0-dev libxcb-sync-dev libxcb-sync1\n", - " libxcb-xfixes0 libxcb-xfixes0-dev libxcb1 libxcb1-dev libxcomposite-dev\n", - " libxcomposite1 libxcursor-dev libxcursor1 libxdamage-dev libxdamage1\n", - " libxdmcp-dev libxdmcp6 libxdot4 libxext-dev libxext6 libxfixes-dev\n", - " libxfixes3 libxft-dev libxft2 libxi-dev libxi6 libxinerama-dev libxinerama1\n", - " libxkbcommon0 libxml2 libxml2-utils libxpm-dev libxpm4 libxrandr-dev\n", - " libxrandr2 libxrender-dev libxrender1 libxshmfence-dev libxshmfence1 libxss1\n", - " libxt-dev libxt6 libxxf86dga-dev libxxf86dga1 libxxf86vm-dev libxxf86vm1 m4\n", - " man-db mesa-common-dev po-debconf python python-minimal python2.7\n", - " python2.7-minimal shared-mime-info timgm6mb-soundfont ubuntu-mono x11-common\n", - " x11proto-composite-dev x11proto-core-dev x11proto-damage-dev x11proto-dev\n", - " x11proto-fixes-dev x11proto-input-dev x11proto-randr-dev x11proto-xext-dev\n", - " x11proto-xf86dga-dev x11proto-xf86vidmode-dev x11proto-xinerama-dev xkb-data\n", - " xorg-sgml-doctools xtrans-dev\n", - "Suggested packages:\n", - " autoconf-archive gnu-standards autoconf-doc wamerican | wordlist whois\n", - " vacation dh-make dwz uuid-dev firebird-dev freetds-dev libgdbm-dev\n", - " default-libmysqlclient-dev libpq-dev libsqlite3-dev pxlib-dev unixodbc-dev\n", - " gdb-doc gettext-doc libasprintf-dev libgettextpo-dev groff\n", - " libasound2-plugins alsa-utils libasound2-doc libcairo2-doc cups-common\n", - " libforms-bin libforms-doc 
libgd-tools libglib2.0-doc gmp-doc libgmp10-doc\n", - " libmpfr-dev libgraphite2-utils gvfs libgtk2.0-doc libice-doc icu-doc jackd2\n", - " libtool-doc liblzma-doc libportaudio2 imagemagick libpango1.0-doc pciutils\n", - " pulseaudio librsvg2-bin lm-sensors libsm-doc sndiod gfortran\n", - " | fortran95-compiler gcj-jdk libxcb-doc libxext-doc libxt-doc m4-doc\n", - " apparmor www-browser libmail-box-perl python-doc python-tk python2.7-doc\n", - " binfmt-support db5.3-util libapache2-mod-svn subversion-tools\n", - " fluid-soundfont-gm fluidsynth timidity musescore\n", - "Recommended packages:\n", - " libggi2-dev xdg-user-dirs\n", - "The following NEW packages will be installed:\n", - " adwaita-icon-theme autoconf automake autopoint autotools-dev bsdmainutils\n", - " debhelper dh-autoreconf dh-strip-nondeterminism file fontconfig\n", - " fontconfig-config fonts-dejavu-core fp-compiler-3.0.4 fp-docs-3.0.4\n", - " fp-ide-3.0.4 fp-units-base-3.0.4 fp-units-db-3.0.4 fp-units-fcl-3.0.4\n", - " fp-units-fv-3.0.4 fp-units-gfx-3.0.4 fp-units-gtk2-3.0.4 fp-units-math-3.0.4\n", - " fp-units-misc-3.0.4 fp-units-multimedia-3.0.4 fp-units-net-3.0.4\n", - " fp-units-rtl-3.0.4 fp-utils-3.0.4 fpc fpc-3.0.4 fpc-source fpc-source-3.0.4\n", - " gdb gdbserver gettext gettext-base gir1.2-atk-1.0 gir1.2-freedesktop\n", - " gir1.2-gdkpixbuf-2.0 gir1.2-gtk-2.0 gir1.2-harfbuzz-0.0 gir1.2-pango-1.0\n", - " groff-base gtk-update-icon-cache hicolor-icon-theme humanity-icon-theme\n", - " icu-devtools intltool-debian lazarus lazarus-1.8 lazarus-doc-1.8 lazarus-ide\n", - " lazarus-ide-1.8 lazarus-ide-gtk2-1.8 lazarus-src-1.8 lcl-1.8 lcl-gtk2-1.8\n", - " lcl-nogui-1.8 lcl-units-1.8 lcl-utils-1.8 liba52-0.7.4 liba52-0.7.4-dev\n", - " libapr1 libaprutil1 libarchive-cpio-perl libarchive-zip-perl libasound2\n", - " libasound2-data libasound2-dev libasyncns0 libatk1.0-0 libatk1.0-data\n", - " libatk1.0-dev libavahi-client3 libavahi-common-data libavahi-common3\n", - " libbabeltrace1 libc6-dbg libcaca-dev 
libcaca0 libcairo-gobject2\n", - " libcairo-script-interpreter2 libcairo2 libcairo2-dev libcdt5 libcgraph6\n", - " libcroco3 libcups2 libdatrie1 libdca-dev libdca0 libdrm-amdgpu1\n", - " libdrm-common libdrm-dev libdrm-intel1 libdrm-nouveau2 libdrm-radeon1\n", - " libdrm2 libdts-dev libdw1 libegl-mesa0 libegl1 libelf1\n", - " libfile-stripnondeterminism-perl libflac-dev libflac8 libfluidsynth1\n", - " libfontconfig1 libfontconfig1-dev libforms-dev libforms2 libgail-common\n", - " libgail18 libgbm1 libgd-dev libgd3 libgdk-pixbuf2.0-0 libgdk-pixbuf2.0-bin\n", - " libgdk-pixbuf2.0-common libgdk-pixbuf2.0-dev libgl1 libgl1-mesa-dev\n", - " libgl1-mesa-dri libglapi-mesa libgles1 libgles2 libglib2.0-bin\n", - " libglib2.0-data libglib2.0-dev libglib2.0-dev-bin libglu1-mesa\n", - " libglu1-mesa-dev libglvnd-core-dev libglvnd-dev libglvnd0 libglx-mesa0\n", - " libglx0 libgmp-dev libgmpxx4ldbl libgraphite2-3 libgraphite2-dev\n", - " libgraphviz-dev libgtk2.0-0 libgtk2.0-bin libgtk2.0-common libgtk2.0-dev\n", - " libgts-0.7-5 libgts-bin libgvc6 libgvc6-plugins-gtk libgvpr2 libharfbuzz-dev\n", - " libharfbuzz-gobject0 libharfbuzz-icu0 libharfbuzz0b libice-dev libice6\n", - " libicu-dev libicu-le-hb-dev libicu-le-hb0 libicu60 libiculx60 libidn11\n", - " libjack-jackd2-0 libjbig-dev libjbig0 liblab-gamut1 libllvm9 libltdl-dev\n", - " libltdl7 liblzma-dev liblzo2-2 libmad0 libmad0-dev libmagic-mgc libmagic1\n", - " libmail-sendmail-perl libmikmod-config libmikmod-dev libmikmod3\n", - " libmodplug-dev libmodplug1 libogg-dev libogg0 libopenal-data libopenal1\n", - " libopengl0 libpango-1.0-0 libpango1.0-dev libpangocairo-1.0-0\n", - " libpangoft2-1.0-0 libpangoxft-1.0-0 libpathplan4 libpciaccess0 libpcre16-3\n", - " libpcre3-dev libpcre32-3 libpcrecpp0v5 libpipeline1 libpixman-1-0\n", - " libpixman-1-dev libproxy-tools libproxy1v5 libpthread-stubs0-dev\n", - " libpulse-dev libpulse-mainloop-glib0 libpulse0 libpython-stdlib\n", - " libpython2.7-minimal libpython2.7-stdlib 
librsvg2-2 librsvg2-common\n", - " libsamplerate0 libsdl-mixer1.2 libsdl-mixer1.2-dev libsdl1.2-dev\n", - " libsdl1.2debian libsdl2-2.0-0 libsensors4 libserf-1-1 libsigsegv2 libslang2\n", - " libslang2-dev libsm-dev libsm6 libsndfile1 libsndio6.1 libsvn1\n", - " libsys-hostname-long-perl libthai-data libthai0 libtiff-dev libtiff5\n", - " libtiff5-dev libtiffxx5 libtimedate-perl libtool libvlc-bin libvlc-dev\n", - " libvlc5 libvlccore9 libvorbis-dev libvorbis0a libvorbisenc2 libvorbisfile3\n", - " libvpx-dev libvpx5 libwayland-client0 libwayland-cursor0 libwayland-egl1\n", - " libwayland-egl1-mesa libwayland-server0 libwebp6 libx11-6 libx11-data\n", - " libx11-dev libx11-doc libx11-xcb-dev libx11-xcb1 libxau-dev libxau6\n", - " libxcb-dri2-0 libxcb-dri2-0-dev libxcb-dri3-0 libxcb-dri3-dev libxcb-glx0\n", - " libxcb-glx0-dev libxcb-present-dev libxcb-present0 libxcb-randr0\n", - " libxcb-randr0-dev libxcb-render0 libxcb-render0-dev libxcb-shape0\n", - " libxcb-shape0-dev libxcb-shm0 libxcb-shm0-dev libxcb-sync-dev libxcb-sync1\n", - " libxcb-xfixes0 libxcb-xfixes0-dev libxcb1 libxcb1-dev libxcomposite-dev\n", - " libxcomposite1 libxcursor-dev libxcursor1 libxdamage-dev libxdamage1\n", - " libxdmcp-dev libxdmcp6 libxdot4 libxext-dev libxext6 libxfixes-dev\n", - " libxfixes3 libxft-dev libxft2 libxi-dev libxi6 libxinerama-dev libxinerama1\n", - " libxkbcommon0 libxml2 libxml2-utils libxpm-dev libxpm4 libxrandr-dev\n", - " libxrandr2 libxrender-dev libxrender1 libxshmfence-dev libxshmfence1 libxss1\n", - " libxt-dev libxt6 libxxf86dga-dev libxxf86dga1 libxxf86vm-dev libxxf86vm1 m4\n", - " man-db mesa-common-dev po-debconf python python-minimal python2.7\n", - " python2.7-minimal shared-mime-info subversion timgm6mb-soundfont ubuntu-mono\n", - " x11-common x11proto-composite-dev x11proto-core-dev x11proto-damage-dev\n", - " x11proto-dev x11proto-fixes-dev x11proto-input-dev x11proto-randr-dev\n", - " x11proto-xext-dev x11proto-xf86dga-dev x11proto-xf86vidmode-dev\n", - " 
x11proto-xinerama-dev xkb-data xorg-sgml-doctools xtrans-dev zip\n", - "The following packages will be upgraded:\n", - " libglib2.0-0\n", - "1 upgraded, 348 newly installed, 0 to remove and 37 not upgraded.\n", - "Need to get 243 MB of archives.\n", - "After this operation, 1802 MB of additional disk space will be used.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Get:1 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxau6 amd64 1:1.0.8-1 [8376 B]\n", - "Get:2 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxdmcp6 amd64 1:1.1.2-3 [10.7 kB]\n", - "Get:3 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb1 amd64 1.13-2~ubuntu18.04 [45.5 kB]\n", - "Get:4 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libx11-data all 2:1.6.4-3ubuntu0.2 [113 kB]\n", - "Get:5 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libx11-6 amd64 2:1.6.4-3ubuntu0.2 [569 kB]\n", - "Get:6 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxext6 amd64 2:1.3.3-1 [29.4 kB]\n", - "Get:7 http://archive.ubuntu.com/ubuntu bionic/main amd64 bsdmainutils amd64 11.1.2ubuntu1 [181 kB]\n", - "Get:8 http://archive.ubuntu.com/ubuntu bionic/main amd64 groff-base amd64 1.22.3-10 [1153 kB]\n", - "Get:9 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpipeline1 amd64 1.5.0-1 [25.3 kB]\n", - "Get:10 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 man-db amd64 2.8.3-2ubuntu0.1 [1019 kB]\n", - "Get:11 http://archive.ubuntu.com/ubuntu bionic/main amd64 fonts-dejavu-core all 2.37-1 [1041 kB]\n", - "Get:12 http://archive.ubuntu.com/ubuntu bionic/main amd64 fontconfig-config all 2.12.6-0ubuntu2 [55.8 kB]\n", - "Get:13 http://archive.ubuntu.com/ubuntu bionic/main amd64 libfontconfig1 amd64 2.12.6-0ubuntu2 [137 kB]\n", - "Get:14 http://archive.ubuntu.com/ubuntu bionic/main amd64 fontconfig amd64 2.12.6-0ubuntu2 [169 kB]\n", - "Get:15 http://archive.ubuntu.com/ubuntu bionic/main amd64 liblzo2-2 amd64 2.08-1.2 [48.7 kB]\n", - 
"Get:16 http://archive.ubuntu.com/ubuntu bionic/main amd64 libogg0 amd64 1.3.2-1 [17.2 kB]\n", - "Get:17 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 x11-common all 1:7.7+19ubuntu7.1 [22.5 kB]\n", - "Get:18 http://archive.ubuntu.com/ubuntu bionic/main amd64 libice6 amd64 2:1.0.9-2 [40.2 kB]\n", - "Get:19 http://archive.ubuntu.com/ubuntu bionic/main amd64 libsm6 amd64 2:1.2.2-1 [15.8 kB]\n", - "Get:20 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxrender1 amd64 1:0.9.10-1 [18.7 kB]\n", - "Get:21 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxft2 amd64 2.3.2-1 [36.1 kB]\n", - "Get:22 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxinerama1 amd64 2:1.1.3-1 [7908 B]\n", - "Get:23 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxss1 amd64 1:1.2.2-1 [8582 B]\n", - "Get:24 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxxf86dga1 amd64 2:1.1.4-1 [13.7 kB]\n", - "Get:25 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxxf86vm1 amd64 1:1.1.4-1 [10.6 kB]\n", - "Get:26 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpython2.7-minimal amd64 2.7.17-1~18.04 [335 kB]\n", - "Get:27 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 python2.7-minimal amd64 2.7.17-1~18.04 [1294 kB]\n", - "Get:28 http://archive.ubuntu.com/ubuntu bionic/main amd64 python-minimal amd64 2.7.15~rc1-1 [28.1 kB]\n", - "Get:29 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpython2.7-stdlib amd64 2.7.17-1~18.04 [1915 kB]\n", - "Get:30 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 python2.7 amd64 2.7.17-1~18.04 [248 kB]\n", - "Get:31 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpython-stdlib amd64 2.7.15~rc1-1 [7620 B]\n", - "Get:32 http://archive.ubuntu.com/ubuntu bionic/main amd64 python amd64 2.7.15~rc1-1 [140 kB]\n", - "Get:33 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libmagic-mgc amd64 1:5.32-2ubuntu0.3 [184 kB]\n", - "Get:34 http://archive.ubuntu.com/ubuntu 
bionic-updates/main amd64 libmagic1 amd64 1:5.32-2ubuntu0.3 [68.7 kB]\n", - "Get:35 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 file amd64 1:5.32-2ubuntu0.3 [22.1 kB]\n", - "Get:36 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libelf1 amd64 0.170-0.4ubuntu0.1 [44.8 kB]\n", - "Get:37 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglib2.0-0 amd64 2.56.4-0ubuntu0.18.04.6 [1171 kB]\n", - "Get:38 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglib2.0-data all 2.56.4-0ubuntu0.18.04.6 [4540 B]\n", - "Get:39 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libicu60 amd64 60.2-3ubuntu3.1 [8054 kB]\n", - "Get:40 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libidn11 amd64 1.33-2.1ubuntu1.2 [46.6 kB]\n", - "Get:41 http://archive.ubuntu.com/ubuntu bionic/main amd64 libslang2 amd64 2.3.1a-3ubuntu1 [424 kB]\n", - "Get:42 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxml2 amd64 2.9.4+dfsg1-6.1ubuntu1.3 [663 kB]\n", - "Get:43 http://archive.ubuntu.com/ubuntu bionic/main amd64 shared-mime-info amd64 1.9-2 [426 kB]\n", - "Get:44 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 xkb-data all 2.23.1-1ubuntu1.18.04.1 [325 kB]\n", - "Get:45 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 gettext-base amd64 0.19.8.1-6ubuntu0.3 [113 kB]\n", - "Get:46 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdrm-common all 2.4.99-1ubuntu1~18.04.2 [5328 B]\n", - "Get:47 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdrm2 amd64 2.4.99-1ubuntu1~18.04.2 [31.7 kB]\n", - "Get:48 http://archive.ubuntu.com/ubuntu bionic/main amd64 hicolor-icon-theme all 0.17-2 [9976 B]\n", - "Get:49 http://archive.ubuntu.com/ubuntu bionic/main amd64 libjbig0 amd64 2.1-3.1build1 [26.7 kB]\n", - "Get:50 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libtiff5 amd64 4.0.9-5ubuntu0.3 [153 kB]\n", - "Get:51 http://archive.ubuntu.com/ubuntu bionic/main amd64 
libgdk-pixbuf2.0-common all 2.36.11-2 [4536 B]\n", - "Get:52 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgdk-pixbuf2.0-0 amd64 2.36.11-2 [165 kB]\n", - "Get:53 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 gtk-update-icon-cache amd64 3.22.30-1ubuntu4 [28.3 kB]\n", - "Get:54 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpixman-1-0 amd64 0.34.0-2 [229 kB]\n", - "Get:55 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-render0 amd64 1.13-2~ubuntu18.04 [14.7 kB]\n", - "Get:56 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-shm0 amd64 1.13-2~ubuntu18.04 [5600 B]\n", - "Get:57 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libcairo2 amd64 1.15.10-2ubuntu0.1 [580 kB]\n", - "Get:58 http://archive.ubuntu.com/ubuntu bionic/main amd64 libcroco3 amd64 0.6.12-2 [81.3 kB]\n", - "Get:59 http://archive.ubuntu.com/ubuntu bionic/main amd64 libthai-data all 0.1.27-2 [133 kB]\n", - "Get:60 http://archive.ubuntu.com/ubuntu bionic/main amd64 libdatrie1 amd64 0.2.10-7 [17.8 kB]\n", - "Get:61 http://archive.ubuntu.com/ubuntu bionic/main amd64 libthai0 amd64 0.1.27-2 [18.0 kB]\n", - "Get:62 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpango-1.0-0 amd64 1.40.14-1ubuntu0.1 [153 kB]\n", - "Get:63 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgraphite2-3 amd64 1.3.11-2 [78.7 kB]\n", - "Get:64 http://archive.ubuntu.com/ubuntu bionic/main amd64 libharfbuzz0b amd64 1.7.2-1ubuntu1 [232 kB]\n", - "Get:65 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpangoft2-1.0-0 amd64 1.40.14-1ubuntu0.1 [33.2 kB]\n", - "Get:66 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpangocairo-1.0-0 amd64 1.40.14-1ubuntu0.1 [20.8 kB]\n", - "Get:67 http://archive.ubuntu.com/ubuntu bionic/main amd64 librsvg2-2 amd64 2.40.20-2 [98.6 kB]\n", - "Get:68 http://archive.ubuntu.com/ubuntu bionic/main amd64 librsvg2-common amd64 2.40.20-2 [5124 B]\n", - "Get:69 http://archive.ubuntu.com/ubuntu 
bionic/main amd64 humanity-icon-theme all 0.6.15 [1250 kB]\n", - "Get:70 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 ubuntu-mono all 16.10+18.04.20181005-0ubuntu1 [149 kB]\n", - "Get:71 http://archive.ubuntu.com/ubuntu bionic/main amd64 adwaita-icon-theme all 3.28.0-1ubuntu1 [3306 kB]\n", - "Get:72 http://archive.ubuntu.com/ubuntu bionic/main amd64 libsigsegv2 amd64 2.12-1 [14.7 kB]\n", - "Get:73 http://archive.ubuntu.com/ubuntu bionic/main amd64 m4 amd64 1.4.18-1 [197 kB]\n", - "Get:74 http://archive.ubuntu.com/ubuntu bionic/main amd64 autoconf all 2.69-11 [322 kB]\n", - "Get:75 http://archive.ubuntu.com/ubuntu bionic/main amd64 autotools-dev all 20180224.1 [39.6 kB]\n", - "Get:76 http://archive.ubuntu.com/ubuntu bionic/main amd64 automake all 1:1.15.1-3ubuntu2 [509 kB]\n", - "Get:77 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 autopoint all 0.19.8.1-6ubuntu0.3 [426 kB]\n", - "Get:78 http://archive.ubuntu.com/ubuntu bionic/main amd64 libtool all 2.4.6-2 [194 kB]\n", - "Get:79 http://archive.ubuntu.com/ubuntu bionic/main amd64 dh-autoreconf all 17 [15.8 kB]\n", - "Get:80 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libarchive-zip-perl all 1.60-1ubuntu0.1 [84.6 kB]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Get:81 http://archive.ubuntu.com/ubuntu bionic/main amd64 libfile-stripnondeterminism-perl all 0.040-1.1~build1 [13.8 kB]\n", - "Get:82 http://archive.ubuntu.com/ubuntu bionic/main amd64 libtimedate-perl all 2.3000-2 [37.5 kB]\n", - "Get:83 http://archive.ubuntu.com/ubuntu bionic/main amd64 dh-strip-nondeterminism all 0.040-1.1~build1 [5208 B]\n", - "Get:84 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 gettext amd64 0.19.8.1-6ubuntu0.3 [1293 kB]\n", - "Get:85 http://archive.ubuntu.com/ubuntu bionic/main amd64 intltool-debian all 0.35.0+20060710.4 [24.9 kB]\n", - "Get:86 http://archive.ubuntu.com/ubuntu bionic/main amd64 po-debconf all 1.0.20 [232 kB]\n", - "Get:87 
http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 debhelper all 11.1.6ubuntu2 [902 kB]\n", - "Get:88 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-rtl-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [2039 kB]\n", - "Get:89 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-compiler-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [1949 kB]\n", - "Get:90 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-docs-3.0.4 all 3.0.4+dfsg-18ubuntu2 [882 kB]\n", - "Get:91 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-ide-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [1453 kB]\n", - "Get:92 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-base-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [1050 kB]\n", - "Get:93 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-db-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [745 kB]\n", - "Get:94 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-fcl-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [4798 kB]\n", - "Get:95 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-fv-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [297 kB]\n", - "Get:96 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-gfx-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [1099 kB]\n", - "Get:97 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgtk2.0-common all 2.24.32-1ubuntu1 [125 kB]\n", - "Get:98 http://archive.ubuntu.com/ubuntu bionic/main amd64 libatk1.0-data all 2.28.1-1 [2992 B]\n", - "Get:99 http://archive.ubuntu.com/ubuntu bionic/main amd64 libatk1.0-0 amd64 2.28.1-1 [43.9 kB]\n", - "Get:100 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libavahi-common-data amd64 0.7-3.1ubuntu1.2 [22.1 kB]\n", - "Get:101 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libavahi-common3 amd64 0.7-3.1ubuntu1.2 [21.6 kB]\n", - "Get:102 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libavahi-client3 amd64 0.7-3.1ubuntu1.2 [25.2 kB]\n", - "Get:103 
http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libcups2 amd64 2.2.7-1ubuntu2.7 [211 kB]\n", - "Get:104 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxcomposite1 amd64 1:0.4.4-2 [6988 B]\n", - "Get:105 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxfixes3 amd64 1:5.0.3-1 [10.8 kB]\n", - "Get:106 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxcursor1 amd64 1:1.1.15-1 [19.8 kB]\n", - "Get:107 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxdamage1 amd64 1:1.1.4-3 [6934 B]\n", - "Get:108 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxi6 amd64 2:1.7.9-1 [29.2 kB]\n", - "Get:109 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxrandr2 amd64 2:1.5.1-1 [18.1 kB]\n", - "Get:110 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgtk2.0-0 amd64 2.24.32-1ubuntu1 [1769 kB]\n", - "Get:111 http://archive.ubuntu.com/ubuntu bionic/main amd64 gir1.2-atk-1.0 amd64 2.28.1-1 [17.8 kB]\n", - "Get:112 http://archive.ubuntu.com/ubuntu bionic/main amd64 gir1.2-freedesktop amd64 1.56.1-1 [9080 B]\n", - "Get:113 http://archive.ubuntu.com/ubuntu bionic/main amd64 gir1.2-gdkpixbuf-2.0 amd64 2.36.11-2 [7748 B]\n", - "Get:114 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpangoxft-1.0-0 amd64 1.40.14-1ubuntu0.1 [15.0 kB]\n", - "Get:115 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 gir1.2-pango-1.0 amd64 1.40.14-1ubuntu0.1 [21.6 kB]\n", - "Get:116 http://archive.ubuntu.com/ubuntu bionic/main amd64 gir1.2-gtk-2.0 amd64 2.24.32-1ubuntu1 [172 kB]\n", - "Get:117 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglib2.0-bin amd64 2.56.4-0ubuntu0.18.04.6 [68.8 kB]\n", - "Get:118 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglib2.0-dev-bin amd64 2.56.4-0ubuntu0.18.04.6 [102 kB]\n", - "Get:119 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpcre16-3 amd64 2:8.39-9 [147 kB]\n", - "Get:120 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpcre32-3 amd64 2:8.39-9 [138 kB]\n", - 
"Get:121 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpcrecpp0v5 amd64 2:8.39-9 [15.3 kB]\n", - "Get:122 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpcre3-dev amd64 2:8.39-9 [537 kB]\n", - "Get:123 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglib2.0-dev amd64 2.56.4-0ubuntu0.18.04.6 [1385 kB]\n", - "Get:124 http://archive.ubuntu.com/ubuntu bionic/main amd64 xorg-sgml-doctools all 1:1.11-1 [12.9 kB]\n", - "Get:125 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-dev all 2018.4-4 [251 kB]\n", - "Get:126 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-core-dev all 2018.4-4 [2620 B]\n", - "Get:127 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxau-dev amd64 1:1.0.8-1 [11.1 kB]\n", - "Get:128 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxdmcp-dev amd64 1:1.1.2-3 [25.1 kB]\n", - "Get:129 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-input-dev all 2018.4-4 [2620 B]\n", - "Get:130 http://archive.ubuntu.com/ubuntu bionic/main amd64 xtrans-dev all 1.3.5-1 [70.5 kB]\n", - "Get:131 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpthread-stubs0-dev amd64 0.3-4 [4068 B]\n", - "Get:132 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb1-dev amd64 1.13-2~ubuntu18.04 [80.0 kB]\n", - "Get:133 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libx11-dev amd64 2:1.6.4-3ubuntu0.2 [640 kB]\n", - "Get:134 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgdk-pixbuf2.0-dev amd64 2.36.11-2 [46.8 kB]\n", - "Get:135 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libcairo-gobject2 amd64 1.15.10-2ubuntu0.1 [17.1 kB]\n", - "Get:136 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libcairo-script-interpreter2 amd64 1.15.10-2ubuntu0.1 [53.5 kB]\n", - "Get:137 http://archive.ubuntu.com/ubuntu bionic/main amd64 libfontconfig1-dev amd64 2.12.6-0ubuntu2 [689 kB]\n", - "Get:138 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxrender-dev amd64 
1:0.9.10-1 [24.9 kB]\n", - "Get:139 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-xext-dev all 2018.4-4 [2620 B]\n", - "Get:140 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxext-dev amd64 2:1.3.3-1 [82.1 kB]\n", - "Get:141 http://archive.ubuntu.com/ubuntu bionic/main amd64 libice-dev amd64 2:1.0.9-2 [46.8 kB]\n", - "Get:142 http://archive.ubuntu.com/ubuntu bionic/main amd64 libsm-dev amd64 2:1.2.2-1 [16.2 kB]\n", - "Get:143 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpixman-1-dev amd64 0.34.0-2 [244 kB]\n", - "Get:144 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-render0-dev amd64 1.13-2~ubuntu18.04 [18.4 kB]\n", - "Get:145 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-shm0-dev amd64 1.13-2~ubuntu18.04 [6684 B]\n", - "Get:146 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libcairo2-dev amd64 1.15.10-2ubuntu0.1 [626 kB]\n", - "Get:147 http://archive.ubuntu.com/ubuntu bionic/main amd64 libharfbuzz-icu0 amd64 1.7.2-1ubuntu1 [5604 B]\n", - "Get:148 http://archive.ubuntu.com/ubuntu bionic/main amd64 libharfbuzz-gobject0 amd64 1.7.2-1ubuntu1 [13.4 kB]\n", - "Get:149 http://archive.ubuntu.com/ubuntu bionic/main amd64 gir1.2-harfbuzz-0.0 amd64 1.7.2-1ubuntu1 [18.6 kB]\n", - "Get:150 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgraphite2-dev amd64 1.3.11-2 [14.5 kB]\n", - "Get:151 http://archive.ubuntu.com/ubuntu bionic/main amd64 libicu-le-hb0 amd64 1.0.3+git161113-4 [14.3 kB]\n", - "Get:152 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libiculx60 amd64 60.2-3ubuntu3.1 [19.0 kB]\n", - "Get:153 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 icu-devtools amd64 60.2-3ubuntu3.1 [179 kB]\n", - "Get:154 http://archive.ubuntu.com/ubuntu bionic/main amd64 libicu-le-hb-dev amd64 1.0.3+git161113-4 [29.5 kB]\n", - "Get:155 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libicu-dev amd64 60.2-3ubuntu3.1 [8889 kB]\n" - ] - }, - { - "name": "stdout", - 
"output_type": "stream", - "text": [ - "Get:156 http://archive.ubuntu.com/ubuntu bionic/main amd64 libharfbuzz-dev amd64 1.7.2-1ubuntu1 [302 kB]\n", - "Get:157 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxft-dev amd64 2.3.2-1 [45.7 kB]\n", - "Get:158 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpango1.0-dev amd64 1.40.14-1ubuntu0.1 [288 kB]\n", - "Get:159 http://archive.ubuntu.com/ubuntu bionic/main amd64 libatk1.0-dev amd64 2.28.1-1 [79.9 kB]\n", - "Get:160 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-xinerama-dev all 2018.4-4 [2628 B]\n", - "Get:161 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxinerama-dev amd64 2:1.1.3-1 [8404 B]\n", - "Get:162 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-fixes-dev all 1:2018.4-4 [2620 B]\n", - "Get:163 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxfixes-dev amd64 1:5.0.3-1 [11.0 kB]\n", - "Get:164 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxi-dev amd64 2:1.7.9-1 [186 kB]\n", - "Get:165 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-randr-dev all 2018.4-4 [2620 B]\n", - "Get:166 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxrandr-dev amd64 2:1.5.1-1 [24.0 kB]\n", - "Get:167 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxcursor-dev amd64 1:1.1.15-1 [26.5 kB]\n", - "Get:168 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-composite-dev all 1:2018.4-4 [2620 B]\n", - "Get:169 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxcomposite-dev amd64 1:0.4.4-2 [9136 B]\n", - "Get:170 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-damage-dev all 1:2018.4-4 [2620 B]\n", - "Get:171 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxdamage-dev amd64 1:1.1.4-3 [5028 B]\n", - "Get:172 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxml2-utils amd64 2.9.4+dfsg1-6.1ubuntu1.3 [35.9 kB]\n", - "Get:173 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgtk2.0-dev amd64 2.24.32-1ubuntu1 
[2652 kB]\n", - "Get:174 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-gtk2-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [1013 kB]\n", - "Get:175 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-math-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [344 kB]\n", - "Get:176 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-misc-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [1087 kB]\n", - "Get:177 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-multimedia-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [208 kB]\n", - "Get:178 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-net-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [5416 kB]\n", - "Get:179 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fpc-source-3.0.4 all 3.0.4+dfsg-18ubuntu2 [17.0 MB]\n", - "Get:180 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-utils-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [2758 kB]\n", - "Get:181 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fpc-3.0.4 all 3.0.4+dfsg-18ubuntu2 [24.4 kB]\n", - "Get:182 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fpc all 3.0.4+dfsg-18ubuntu2 [23.9 kB]\n", - "Get:183 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fpc-source all 3.0.4+dfsg-18ubuntu2 [23.9 kB]\n", - "Get:184 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdw1 amd64 0.170-0.4ubuntu0.1 [203 kB]\n", - "Get:185 http://archive.ubuntu.com/ubuntu bionic/main amd64 libbabeltrace1 amd64 1.5.5-1 [154 kB]\n", - "Get:186 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 gdb amd64 8.1-0ubuntu3.2 [2938 kB]\n", - "Get:187 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 gdbserver amd64 8.1-0ubuntu3.2 [282 kB]\n", - "Get:188 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lazarus-ide-1.8 amd64 1.8.2+dfsg-3 [8386 kB]\n", - "Get:189 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lazarus-ide-gtk2-1.8 amd64 1.8.2+dfsg-3 [14.1 MB]\n", - 
"Get:190 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lazarus-ide all 1.8.2+dfsg-3 [28.9 kB]\n", - "Get:191 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lazarus-src-1.8 all 1.8.2+dfsg-3 [15.6 MB]\n", - "Get:192 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lcl-nogui-1.8 amd64 1.8.2+dfsg-3 [6637 kB]\n", - "Get:193 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lcl-gtk2-1.8 amd64 1.8.2+dfsg-3 [8523 kB]\n", - "Get:194 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lcl-units-1.8 amd64 1.8.2+dfsg-3 [14.3 MB]\n", - "Get:195 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lcl-utils-1.8 amd64 1.8.2+dfsg-3 [6694 kB]\n", - "Get:196 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lcl-1.8 amd64 1.8.2+dfsg-3 [28.9 kB]\n", - "Get:197 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lazarus-1.8 all 1.8.2+dfsg-3 [29.4 kB]\n", - "Get:198 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lazarus all 1.8.2+dfsg-3 [28.8 kB]\n", - "Get:199 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lazarus-doc-1.8 all 1.8.2+dfsg-3 [15.1 MB]\n", - "Get:200 http://archive.ubuntu.com/ubuntu bionic/universe amd64 liba52-0.7.4 amd64 0.7.4-19 [35.2 kB]\n", - "Get:201 http://archive.ubuntu.com/ubuntu bionic/universe amd64 liba52-0.7.4-dev amd64 0.7.4-19 [47.5 kB]\n", - "Get:202 http://archive.ubuntu.com/ubuntu bionic/main amd64 libapr1 amd64 1.6.3-2 [90.9 kB]\n", - "Get:203 http://archive.ubuntu.com/ubuntu bionic/main amd64 libaprutil1 amd64 1.6.1-2 [84.4 kB]\n", - "Get:204 http://archive.ubuntu.com/ubuntu bionic/main amd64 libarchive-cpio-perl all 0.10-1 [9644 B]\n", - "Get:205 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libasound2-data all 1.1.3-5ubuntu0.4 [38.0 kB]\n", - "Get:206 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libasound2 amd64 1.1.3-5ubuntu0.4 [361 kB]\n", - "Get:207 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libasound2-dev amd64 1.1.3-5ubuntu0.4 [123 kB]\n", - 
"Get:208 http://archive.ubuntu.com/ubuntu bionic/main amd64 libasyncns0 amd64 0.8-6 [12.1 kB]\n", - "Get:209 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libcaca0 amd64 0.99.beta19-2ubuntu0.18.04.1 [203 kB]\n", - "Get:210 http://archive.ubuntu.com/ubuntu bionic/main amd64 libslang2-dev amd64 2.3.1a-3ubuntu1 [393 kB]\n", - "Get:211 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libcaca-dev amd64 0.99.beta19-2ubuntu0.18.04.1 [747 kB]\n", - "Get:212 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libcdt5 amd64 2.40.1-2 [19.6 kB]\n", - "Get:213 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libcgraph6 amd64 2.40.1-2 [40.8 kB]\n", - "Get:214 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdrm-amdgpu1 amd64 2.4.99-1ubuntu1~18.04.2 [18.2 kB]\n", - "Get:215 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpciaccess0 amd64 0.14-1 [17.9 kB]\n", - "Get:216 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdrm-intel1 amd64 2.4.99-1ubuntu1~18.04.2 [59.9 kB]\n", - "Get:217 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdrm-radeon1 amd64 2.4.99-1ubuntu1~18.04.2 [21.7 kB]\n", - "Get:218 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdrm-nouveau2 amd64 2.4.99-1ubuntu1~18.04.2 [16.5 kB]\n", - "Get:219 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdrm-dev amd64 2.4.99-1ubuntu1~18.04.2 [125 kB]\n", - "Get:220 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libwayland-server0 amd64 1.16.0-1ubuntu1.1~18.04.3 [29.6 kB]\n", - "Get:221 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libgbm1 amd64 19.2.8-0ubuntu0~18.04.3 [28.1 kB]\n", - "Get:222 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglapi-mesa amd64 19.2.8-0ubuntu0~18.04.3 [26.5 kB]\n", - "Get:223 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libwayland-client0 amd64 1.16.0-1ubuntu1.1~18.04.3 [23.6 kB]\n", - "Get:224 http://archive.ubuntu.com/ubuntu 
bionic-updates/main amd64 libx11-xcb1 amd64 2:1.6.4-3ubuntu0.2 [9376 B]\n", - "Get:225 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-dri2-0 amd64 1.13-2~ubuntu18.04 [6920 B]\n", - "Get:226 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-dri3-0 amd64 1.13-2~ubuntu18.04 [6568 B]\n", - "Get:227 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-present0 amd64 1.13-2~ubuntu18.04 [5552 B]\n", - "Get:228 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-sync1 amd64 1.13-2~ubuntu18.04 [8808 B]\n", - "Get:229 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-xfixes0 amd64 1.13-2~ubuntu18.04 [9352 B]\n", - "Get:230 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxshmfence1 amd64 1.3-1 [5028 B]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Get:231 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libegl-mesa0 amd64 19.2.8-0ubuntu0~18.04.3 [95.1 kB]\n", - "Get:232 http://archive.ubuntu.com/ubuntu bionic/main amd64 libflac8 amd64 1.3.2-1 [213 kB]\n", - "Get:233 http://archive.ubuntu.com/ubuntu bionic/main amd64 libogg-dev amd64 1.3.2-1 [156 kB]\n", - "Get:234 http://archive.ubuntu.com/ubuntu bionic/main amd64 libflac-dev amd64 1.3.2-1 [260 kB]\n", - "Get:235 http://archive.ubuntu.com/ubuntu bionic/main amd64 libsamplerate0 amd64 0.1.9-1 [938 kB]\n", - "Get:236 http://archive.ubuntu.com/ubuntu bionic/main amd64 libjack-jackd2-0 amd64 1.9.12~dfsg-2 [263 kB]\n", - "Get:237 http://archive.ubuntu.com/ubuntu bionic/main amd64 libvorbis0a amd64 1.3.5-4.2 [86.4 kB]\n", - "Get:238 http://archive.ubuntu.com/ubuntu bionic/main amd64 libvorbisenc2 amd64 1.3.5-4.2 [70.7 kB]\n", - "Get:239 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libsndfile1 amd64 1.0.28-4ubuntu0.18.04.1 [170 kB]\n", - "Get:240 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpulse0 amd64 1:11.1-1ubuntu7.4 [265 kB]\n", - "Get:241 http://archive.ubuntu.com/ubuntu 
bionic/universe amd64 libfluidsynth1 amd64 1.1.9-1 [137 kB]\n", - "Get:242 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxpm4 amd64 1:3.5.12-1 [34.0 kB]\n", - "Get:243 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libforms2 amd64 1.2.3-1.3 [327 kB]\n", - "Get:244 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxpm-dev amd64 1:3.5.12-1 [87.4 kB]\n", - "Get:245 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libforms-dev amd64 1.2.3-1.3 [692 kB]\n", - "Get:246 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgail18 amd64 2.24.32-1ubuntu1 [14.2 kB]\n", - "Get:247 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgail-common amd64 2.24.32-1ubuntu1 [112 kB]\n", - "Get:248 http://archive.ubuntu.com/ubuntu bionic/main amd64 libwebp6 amd64 0.6.1-2 [185 kB]\n", - "Get:249 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libgd3 amd64 2.2.5-4ubuntu0.4 [119 kB]\n", - "Get:250 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxt6 amd64 1:1.1.5-1 [160 kB]\n", - "Get:251 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxt-dev amd64 1:1.1.5-1 [395 kB]\n", - "Get:252 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libvpx5 amd64 1.7.0-3ubuntu0.18.04.1 [796 kB]\n", - "Get:253 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libvpx-dev amd64 1.7.0-3ubuntu0.18.04.1 [932 kB]\n", - "Get:254 http://archive.ubuntu.com/ubuntu bionic/main amd64 libjbig-dev amd64 2.1-3.1build1 [25.2 kB]\n", - "Get:255 http://archive.ubuntu.com/ubuntu bionic/main amd64 liblzma-dev amd64 5.2.2-1.3 [145 kB]\n", - "Get:256 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libtiffxx5 amd64 4.0.9-5ubuntu0.3 [5800 B]\n", - "Get:257 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libtiff5-dev amd64 4.0.9-5ubuntu0.3 [274 kB]\n", - "Get:258 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libtiff-dev amd64 4.0.9-5ubuntu0.3 [2260 B]\n", - "Get:259 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 
libgd-dev amd64 2.2.5-4ubuntu0.4 [246 kB]\n", - "Get:260 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgdk-pixbuf2.0-bin amd64 2.36.11-2 [7864 B]\n", - "Get:261 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libllvm9 amd64 1:9-2~ubuntu18.04.2 [14.8 MB]\n", - "Get:262 http://archive.ubuntu.com/ubuntu bionic/main amd64 libsensors4 amd64 1:3.4.0-4 [28.8 kB]\n", - "Get:263 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libgl1-mesa-dri amd64 19.2.8-0ubuntu0~18.04.3 [8811 kB]\n", - "Get:264 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglvnd0 amd64 1.0.0-2ubuntu2.3 [47.0 kB]\n", - "Get:265 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libgles1 amd64 1.0.0-2ubuntu2.3 [11.2 kB]\n", - "Get:266 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-glx0 amd64 1.13-2~ubuntu18.04 [22.1 kB]\n", - "Get:267 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglx-mesa0 amd64 19.2.8-0ubuntu0~18.04.3 [139 kB]\n", - "Get:268 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglx0 amd64 1.0.0-2ubuntu2.3 [28.1 kB]\n", - "Get:269 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libgl1 amd64 1.0.0-2ubuntu2.3 [86.2 kB]\n", - "Get:270 http://archive.ubuntu.com/ubuntu bionic/main amd64 libglu1-mesa amd64 9.0.0-2.1build1 [168 kB]\n", - "Get:271 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 mesa-common-dev amd64 19.2.8-0ubuntu0~18.04.3 [651 kB]\n", - "Get:272 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglvnd-core-dev amd64 1.0.0-2ubuntu2.3 [12.8 kB]\n", - "Get:273 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libegl1 amd64 1.0.0-2ubuntu2.3 [32.0 kB]\n", - "Get:274 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libgles2 amd64 1.0.0-2ubuntu2.3 [17.3 kB]\n", - "Get:275 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libopengl0 amd64 1.0.0-2ubuntu2.3 [31.3 kB]\n", - "Get:276 http://archive.ubuntu.com/ubuntu bionic-updates/main 
amd64 libglvnd-dev amd64 1.0.0-2ubuntu2.3 [3416 B]\n", - "Get:277 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libx11-xcb-dev amd64 2:1.6.4-3ubuntu0.2 [9756 B]\n", - "Get:278 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-dri3-dev amd64 1.13-2~ubuntu18.04 [7384 B]\n", - "Get:279 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-randr0 amd64 1.13-2~ubuntu18.04 [16.4 kB]\n", - "Get:280 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-randr0-dev amd64 1.13-2~ubuntu18.04 [20.4 kB]\n", - "Get:281 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-shape0 amd64 1.13-2~ubuntu18.04 [5972 B]\n", - "Get:282 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-shape0-dev amd64 1.13-2~ubuntu18.04 [7144 B]\n", - "Get:283 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-xfixes0-dev amd64 1.13-2~ubuntu18.04 [11.7 kB]\n", - "Get:284 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-sync-dev amd64 1.13-2~ubuntu18.04 [10.6 kB]\n", - "Get:285 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-present-dev amd64 1.13-2~ubuntu18.04 [6972 B]\n", - "Get:286 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxshmfence-dev amd64 1.3-1 [3692 B]\n", - "Get:287 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-dri2-0-dev amd64 1.13-2~ubuntu18.04 [8472 B]\n", - "Get:288 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-glx0-dev amd64 1.13-2~ubuntu18.04 [27.9 kB]\n", - "Get:289 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-xf86vidmode-dev all 2018.4-4 [2632 B]\n", - "Get:290 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxxf86vm-dev amd64 1:1.1.4-1 [13.3 kB]\n", - "Get:291 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libgl1-mesa-dev amd64 19.2.8-0ubuntu0~18.04.3 [6180 B]\n", - "Get:292 http://archive.ubuntu.com/ubuntu bionic/main amd64 libglu1-mesa-dev amd64 9.0.0-2.1build1 [206 kB]\n", - 
"Get:293 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgmpxx4ldbl amd64 2:6.1.2+dfsg-2 [8964 B]\n", - "Get:294 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgmp-dev amd64 2:6.1.2+dfsg-2 [316 kB]\n", - "Get:295 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libgts-0.7-5 amd64 0.7.6+darcs121130-4 [150 kB]\n", - "Get:296 http://archive.ubuntu.com/ubuntu bionic/main amd64 libltdl7 amd64 2.4.6-2 [38.8 kB]\n", - "Get:297 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libpathplan4 amd64 2.40.1-2 [22.6 kB]\n", - "Get:298 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libgvc6 amd64 2.40.1-2 [601 kB]\n", - "Get:299 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libgvpr2 amd64 2.40.1-2 [169 kB]\n", - "Get:300 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libxdot4 amd64 2.40.1-2 [15.7 kB]\n", - "Get:301 http://archive.ubuntu.com/ubuntu bionic/universe amd64 liblab-gamut1 amd64 2.40.1-2 [178 kB]\n", - "Get:302 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libgvc6-plugins-gtk amd64 2.40.1-2 [18.2 kB]\n", - "Get:303 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libgraphviz-dev amd64 2.40.1-2 [57.3 kB]\n", - "Get:304 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgtk2.0-bin amd64 2.24.32-1ubuntu1 [7536 B]\n", - "Get:305 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libgts-bin amd64 0.7.6+darcs121130-4 [41.3 kB]\n", - "Get:306 http://archive.ubuntu.com/ubuntu bionic/main amd64 libltdl-dev amd64 2.4.6-2 [162 kB]\n", - "Get:307 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 libmad0 amd64 0.15.1b-9ubuntu18.04.1 [64.6 kB]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Get:308 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 libmad0-dev amd64 0.15.1b-9ubuntu18.04.1 [64.4 kB]\n", - "Get:309 http://archive.ubuntu.com/ubuntu bionic/main amd64 libsys-hostname-long-perl all 1.5-1 [11.7 kB]\n", - "Get:310 http://archive.ubuntu.com/ubuntu 
bionic/main amd64 libmail-sendmail-perl all 0.80-1 [22.6 kB]\n", - "Get:311 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libmikmod-config amd64 3.3.11.1-3 [5184 B]\n", - "Get:312 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libopenal-data all 1:1.18.2-2 [102 kB]\n", - "Get:313 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libsndio6.1 amd64 1.1.0-3 [23.4 kB]\n", - "Get:314 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libopenal1 amd64 1:1.18.2-2 [266 kB]\n", - "Get:315 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libwayland-cursor0 amd64 1.16.0-1ubuntu1.1~18.04.3 [10.1 kB]\n", - "Get:316 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libwayland-egl1 amd64 1.16.0-1ubuntu1.1~18.04.3 [5464 B]\n", - "Get:317 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libwayland-egl1-mesa amd64 19.2.8-0ubuntu0~18.04.3 [6892 B]\n", - "Get:318 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxkbcommon0 amd64 0.8.2-1~ubuntu18.04.1 [97.8 kB]\n", - "Get:319 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 libsdl2-2.0-0 amd64 2.0.8+dfsg1-1ubuntu1.18.04.4 [382 kB]\n", - "Get:320 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libmikmod3 amd64 3.3.11.1-3 [113 kB]\n", - "Get:321 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libmikmod-dev amd64 3.3.11.1-3 [196 kB]\n", - "Get:322 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libmodplug1 amd64 1:0.8.9.0-1 [150 kB]\n", - "Get:323 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libmodplug-dev amd64 1:0.8.9.0-1 [14.6 kB]\n", - "Get:324 http://archive.ubuntu.com/ubuntu bionic/main amd64 libproxy1v5 amd64 0.4.15-1 [49.5 kB]\n", - "Get:325 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libproxy-tools amd64 0.4.15-1 [5312 B]\n", - "Get:326 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpulse-mainloop-glib0 amd64 1:11.1-1ubuntu7.4 [22.1 kB]\n", - "Get:327 http://archive.ubuntu.com/ubuntu 
bionic-updates/main amd64 libpulse-dev amd64 1:11.1-1ubuntu7.4 [81.5 kB]\n", - "Get:328 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libsdl1.2debian amd64 1.2.15+dfsg2-0.1ubuntu0.1 [175 kB]\n", - "Get:329 http://archive.ubuntu.com/ubuntu bionic/main amd64 libvorbisfile3 amd64 1.3.5-4.2 [16.0 kB]\n", - "Get:330 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libsdl-mixer1.2 amd64 1.2.12-14 [72.0 kB]\n", - "Get:331 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libsdl1.2-dev amd64 1.2.15+dfsg2-0.1ubuntu0.1 [706 kB]\n", - "Get:332 http://archive.ubuntu.com/ubuntu bionic/main amd64 libvorbis-dev amd64 1.3.5-4.2 [321 kB]\n", - "Get:333 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libsdl-mixer1.2-dev amd64 1.2.12-14 [90.6 kB]\n", - "Get:334 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libserf-1-1 amd64 1.3.9-6 [44.4 kB]\n", - "Get:335 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libsvn1 amd64 1.9.7-4ubuntu1 [1183 kB]\n", - "Get:336 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 libvlccore9 amd64 3.0.8-0ubuntu18.04.1 [434 kB]\n", - "Get:337 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 libvlc5 amd64 3.0.8-0ubuntu18.04.1 [68.0 kB]\n", - "Get:338 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 libvlc-bin amd64 3.0.8-0ubuntu18.04.1 [17.1 kB]\n", - "Get:339 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 libvlc-dev amd64 3.0.8-0ubuntu18.04.1 [60.1 kB]\n", - "Get:340 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libx11-doc all 2:1.6.4-3ubuntu0.2 [2065 kB]\n", - "Get:341 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-xf86dga-dev all 2018.4-4 [2624 B]\n", - "Get:342 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxxf86dga-dev amd64 2:1.1.4-1 [17.6 kB]\n", - "Get:343 http://archive.ubuntu.com/ubuntu bionic/universe amd64 subversion amd64 1.9.7-4ubuntu1 [834 kB]\n", - "Get:344 http://archive.ubuntu.com/ubuntu bionic/universe 
amd64 timgm6mb-soundfont all 1.3-2 [5423 kB]\n", - "Get:345 http://archive.ubuntu.com/ubuntu bionic/main amd64 zip amd64 3.0-11build1 [167 kB]\n", - "Get:346 http://archive.ubuntu.com/ubuntu bionic/main amd64 libc6-dbg amd64 2.27-3ubuntu1 [5161 kB]\n", - "Get:347 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libdca0 amd64 0.0.5-10 [100.0 kB]\n", - "Get:348 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libdca-dev amd64 0.0.5-10 [90.2 kB]\n", - "Get:349 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libdts-dev amd64 0.0.5-10 [3068 B]\n", - "Fetched 243 MB in 16s (15.2 MB/s) \n", - "debconf: delaying package configuration, since apt-utils is not installed\n", - "Selecting previously unselected package libxau6:amd64.\n", - "(Reading database ... 17806 files and directories currently installed.)\n", - "Preparing to unpack .../00-libxau6_1%3a1.0.8-1_amd64.deb ...\n", - "Unpacking libxau6:amd64 (1:1.0.8-1) ...\n", - "Selecting previously unselected package libxdmcp6:amd64.\n", - "Preparing to unpack .../01-libxdmcp6_1%3a1.1.2-3_amd64.deb ...\n", - "Unpacking libxdmcp6:amd64 (1:1.1.2-3) ...\n", - "Selecting previously unselected package libxcb1:amd64.\n", - "Preparing to unpack .../02-libxcb1_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb1:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libx11-data.\n", - "Preparing to unpack .../03-libx11-data_2%3a1.6.4-3ubuntu0.2_all.deb ...\n", - "Unpacking libx11-data (2:1.6.4-3ubuntu0.2) ...\n", - "Selecting previously unselected package libx11-6:amd64.\n", - "Preparing to unpack .../04-libx11-6_2%3a1.6.4-3ubuntu0.2_amd64.deb ...\n", - "Unpacking libx11-6:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - "Selecting previously unselected package libxext6:amd64.\n", - "Preparing to unpack .../05-libxext6_2%3a1.3.3-1_amd64.deb ...\n", - "Unpacking libxext6:amd64 (2:1.3.3-1) ...\n", - "Selecting previously unselected package bsdmainutils.\n", - "Preparing to unpack 
.../06-bsdmainutils_11.1.2ubuntu1_amd64.deb ...\n", - "Unpacking bsdmainutils (11.1.2ubuntu1) ...\n", - "Selecting previously unselected package groff-base.\n", - "Preparing to unpack .../07-groff-base_1.22.3-10_amd64.deb ...\n", - "Unpacking groff-base (1.22.3-10) ...\n", - "Selecting previously unselected package libpipeline1:amd64.\n", - "Preparing to unpack .../08-libpipeline1_1.5.0-1_amd64.deb ...\n", - "Unpacking libpipeline1:amd64 (1.5.0-1) ...\n", - "Selecting previously unselected package man-db.\n", - "Preparing to unpack .../09-man-db_2.8.3-2ubuntu0.1_amd64.deb ...\n", - "Unpacking man-db (2.8.3-2ubuntu0.1) ...\n", - "Selecting previously unselected package fonts-dejavu-core.\n", - "Preparing to unpack .../10-fonts-dejavu-core_2.37-1_all.deb ...\n", - "Unpacking fonts-dejavu-core (2.37-1) ...\n", - "Selecting previously unselected package fontconfig-config.\n", - "Preparing to unpack .../11-fontconfig-config_2.12.6-0ubuntu2_all.deb ...\n", - "Unpacking fontconfig-config (2.12.6-0ubuntu2) ...\n", - "Selecting previously unselected package libfontconfig1:amd64.\n", - "Preparing to unpack .../12-libfontconfig1_2.12.6-0ubuntu2_amd64.deb ...\n", - "Unpacking libfontconfig1:amd64 (2.12.6-0ubuntu2) ...\n", - "Selecting previously unselected package fontconfig.\n", - "Preparing to unpack .../13-fontconfig_2.12.6-0ubuntu2_amd64.deb ...\n", - "Unpacking fontconfig (2.12.6-0ubuntu2) ...\n", - "Selecting previously unselected package liblzo2-2:amd64.\n", - "Preparing to unpack .../14-liblzo2-2_2.08-1.2_amd64.deb ...\n", - "Unpacking liblzo2-2:amd64 (2.08-1.2) ...\n", - "Selecting previously unselected package libogg0:amd64.\n", - "Preparing to unpack .../15-libogg0_1.3.2-1_amd64.deb ...\n", - "Unpacking libogg0:amd64 (1.3.2-1) ...\n", - "Selecting previously unselected package x11-common.\n", - "Preparing to unpack .../16-x11-common_1%3a7.7+19ubuntu7.1_all.deb ...\n", - "dpkg-query: no packages found matching nux-tools\n", - "Unpacking x11-common (1:7.7+19ubuntu7.1) 
...\n", - "Selecting previously unselected package libice6:amd64.\n", - "Preparing to unpack .../17-libice6_2%3a1.0.9-2_amd64.deb ...\n", - "Unpacking libice6:amd64 (2:1.0.9-2) ...\n", - "Selecting previously unselected package libsm6:amd64.\n", - "Preparing to unpack .../18-libsm6_2%3a1.2.2-1_amd64.deb ...\n", - "Unpacking libsm6:amd64 (2:1.2.2-1) ...\n", - "Selecting previously unselected package libxrender1:amd64.\n", - "Preparing to unpack .../19-libxrender1_1%3a0.9.10-1_amd64.deb ...\n", - "Unpacking libxrender1:amd64 (1:0.9.10-1) ...\n", - "Selecting previously unselected package libxft2:amd64.\n", - "Preparing to unpack .../20-libxft2_2.3.2-1_amd64.deb ...\n", - "Unpacking libxft2:amd64 (2.3.2-1) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Selecting previously unselected package libxinerama1:amd64.\n", - "Preparing to unpack .../21-libxinerama1_2%3a1.1.3-1_amd64.deb ...\n", - "Unpacking libxinerama1:amd64 (2:1.1.3-1) ...\n", - "Selecting previously unselected package libxss1:amd64.\n", - "Preparing to unpack .../22-libxss1_1%3a1.2.2-1_amd64.deb ...\n", - "Unpacking libxss1:amd64 (1:1.2.2-1) ...\n", - "Selecting previously unselected package libxxf86dga1:amd64.\n", - "Preparing to unpack .../23-libxxf86dga1_2%3a1.1.4-1_amd64.deb ...\n", - "Unpacking libxxf86dga1:amd64 (2:1.1.4-1) ...\n", - "Selecting previously unselected package libxxf86vm1:amd64.\n", - "Preparing to unpack .../24-libxxf86vm1_1%3a1.1.4-1_amd64.deb ...\n", - "Unpacking libxxf86vm1:amd64 (1:1.1.4-1) ...\n", - "Selecting previously unselected package libpython2.7-minimal:amd64.\n", - "Preparing to unpack .../25-libpython2.7-minimal_2.7.17-1~18.04_amd64.deb ...\n", - "Unpacking libpython2.7-minimal:amd64 (2.7.17-1~18.04) ...\n", - "Selecting previously unselected package python2.7-minimal.\n", - "Preparing to unpack .../26-python2.7-minimal_2.7.17-1~18.04_amd64.deb ...\n", - "Unpacking python2.7-minimal (2.7.17-1~18.04) ...\n", - "Selecting previously 
unselected package python-minimal.\n", - "Preparing to unpack .../27-python-minimal_2.7.15~rc1-1_amd64.deb ...\n", - "Unpacking python-minimal (2.7.15~rc1-1) ...\n", - "Selecting previously unselected package libpython2.7-stdlib:amd64.\n", - "Preparing to unpack .../28-libpython2.7-stdlib_2.7.17-1~18.04_amd64.deb ...\n", - "Unpacking libpython2.7-stdlib:amd64 (2.7.17-1~18.04) ...\n", - "Selecting previously unselected package python2.7.\n", - "Preparing to unpack .../29-python2.7_2.7.17-1~18.04_amd64.deb ...\n", - "Unpacking python2.7 (2.7.17-1~18.04) ...\n", - "Selecting previously unselected package libpython-stdlib:amd64.\n", - "Preparing to unpack .../30-libpython-stdlib_2.7.15~rc1-1_amd64.deb ...\n", - "Unpacking libpython-stdlib:amd64 (2.7.15~rc1-1) ...\n", - "Setting up libpython2.7-minimal:amd64 (2.7.17-1~18.04) ...\n", - "Setting up python2.7-minimal (2.7.17-1~18.04) ...\n", - "Linking and byte-compiling packages for runtime python2.7...\n", - "Setting up python-minimal (2.7.15~rc1-1) ...\n", - "Selecting previously unselected package python.\n", - "(Reading database ... 
19614 files and directories currently installed.)\n", - "Preparing to unpack .../000-python_2.7.15~rc1-1_amd64.deb ...\n", - "Unpacking python (2.7.15~rc1-1) ...\n", - "Selecting previously unselected package libmagic-mgc.\n", - "Preparing to unpack .../001-libmagic-mgc_1%3a5.32-2ubuntu0.3_amd64.deb ...\n", - "Unpacking libmagic-mgc (1:5.32-2ubuntu0.3) ...\n", - "Selecting previously unselected package libmagic1:amd64.\n", - "Preparing to unpack .../002-libmagic1_1%3a5.32-2ubuntu0.3_amd64.deb ...\n", - "Unpacking libmagic1:amd64 (1:5.32-2ubuntu0.3) ...\n", - "Selecting previously unselected package file.\n", - "Preparing to unpack .../003-file_1%3a5.32-2ubuntu0.3_amd64.deb ...\n", - "Unpacking file (1:5.32-2ubuntu0.3) ...\n", - "Selecting previously unselected package libelf1:amd64.\n", - "Preparing to unpack .../004-libelf1_0.170-0.4ubuntu0.1_amd64.deb ...\n", - "Unpacking libelf1:amd64 (0.170-0.4ubuntu0.1) ...\n", - "Preparing to unpack .../005-libglib2.0-0_2.56.4-0ubuntu0.18.04.6_amd64.deb ...\n", - "Unpacking libglib2.0-0:amd64 (2.56.4-0ubuntu0.18.04.6) over (2.56.4-0ubuntu0.18.04.4) ...\n", - "Selecting previously unselected package libglib2.0-data.\n", - "Preparing to unpack .../006-libglib2.0-data_2.56.4-0ubuntu0.18.04.6_all.deb ...\n", - "Unpacking libglib2.0-data (2.56.4-0ubuntu0.18.04.6) ...\n", - "Selecting previously unselected package libicu60:amd64.\n", - "Preparing to unpack .../007-libicu60_60.2-3ubuntu3.1_amd64.deb ...\n", - "Unpacking libicu60:amd64 (60.2-3ubuntu3.1) ...\n", - "Selecting previously unselected package libidn11:amd64.\n", - "Preparing to unpack .../008-libidn11_1.33-2.1ubuntu1.2_amd64.deb ...\n", - "Unpacking libidn11:amd64 (1.33-2.1ubuntu1.2) ...\n", - "Selecting previously unselected package libslang2:amd64.\n", - "Preparing to unpack .../009-libslang2_2.3.1a-3ubuntu1_amd64.deb ...\n", - "Unpacking libslang2:amd64 (2.3.1a-3ubuntu1) ...\n", - "Selecting previously unselected package libxml2:amd64.\n", - "Preparing to unpack 
.../010-libxml2_2.9.4+dfsg1-6.1ubuntu1.3_amd64.deb ...\n", - "Unpacking libxml2:amd64 (2.9.4+dfsg1-6.1ubuntu1.3) ...\n", - "Selecting previously unselected package shared-mime-info.\n", - "Preparing to unpack .../011-shared-mime-info_1.9-2_amd64.deb ...\n", - "Unpacking shared-mime-info (1.9-2) ...\n", - "Selecting previously unselected package xkb-data.\n", - "Preparing to unpack .../012-xkb-data_2.23.1-1ubuntu1.18.04.1_all.deb ...\n", - "Unpacking xkb-data (2.23.1-1ubuntu1.18.04.1) ...\n", - "Selecting previously unselected package gettext-base.\n", - "Preparing to unpack .../013-gettext-base_0.19.8.1-6ubuntu0.3_amd64.deb ...\n", - "Unpacking gettext-base (0.19.8.1-6ubuntu0.3) ...\n", - "Selecting previously unselected package libdrm-common.\n", - "Preparing to unpack .../014-libdrm-common_2.4.99-1ubuntu1~18.04.2_all.deb ...\n", - "Unpacking libdrm-common (2.4.99-1ubuntu1~18.04.2) ...\n", - "Selecting previously unselected package libdrm2:amd64.\n", - "Preparing to unpack .../015-libdrm2_2.4.99-1ubuntu1~18.04.2_amd64.deb ...\n", - "Unpacking libdrm2:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Selecting previously unselected package hicolor-icon-theme.\n", - "Preparing to unpack .../016-hicolor-icon-theme_0.17-2_all.deb ...\n", - "Unpacking hicolor-icon-theme (0.17-2) ...\n", - "Selecting previously unselected package libjbig0:amd64.\n", - "Preparing to unpack .../017-libjbig0_2.1-3.1build1_amd64.deb ...\n", - "Unpacking libjbig0:amd64 (2.1-3.1build1) ...\n", - "Selecting previously unselected package libtiff5:amd64.\n", - "Preparing to unpack .../018-libtiff5_4.0.9-5ubuntu0.3_amd64.deb ...\n", - "Unpacking libtiff5:amd64 (4.0.9-5ubuntu0.3) ...\n", - "Selecting previously unselected package libgdk-pixbuf2.0-common.\n", - "Preparing to unpack .../019-libgdk-pixbuf2.0-common_2.36.11-2_all.deb ...\n", - "Unpacking libgdk-pixbuf2.0-common (2.36.11-2) ...\n", - "Selecting previously unselected package libgdk-pixbuf2.0-0:amd64.\n", - "Preparing to unpack 
.../020-libgdk-pixbuf2.0-0_2.36.11-2_amd64.deb ...\n", - "Unpacking libgdk-pixbuf2.0-0:amd64 (2.36.11-2) ...\n", - "Selecting previously unselected package gtk-update-icon-cache.\n", - "Preparing to unpack .../021-gtk-update-icon-cache_3.22.30-1ubuntu4_amd64.deb ...\n", - "No diversion 'diversion of /usr/sbin/update-icon-caches to /usr/sbin/update-icon-caches.gtk2 by libgtk-3-bin', none removed.\n", - "No diversion 'diversion of /usr/share/man/man8/update-icon-caches.8.gz to /usr/share/man/man8/update-icon-caches.gtk2.8.gz by libgtk-3-bin', none removed.\n", - "Unpacking gtk-update-icon-cache (3.22.30-1ubuntu4) ...\n", - "Selecting previously unselected package libpixman-1-0:amd64.\n", - "Preparing to unpack .../022-libpixman-1-0_0.34.0-2_amd64.deb ...\n", - "Unpacking libpixman-1-0:amd64 (0.34.0-2) ...\n", - "Selecting previously unselected package libxcb-render0:amd64.\n", - "Preparing to unpack .../023-libxcb-render0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-render0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-shm0:amd64.\n", - "Preparing to unpack .../024-libxcb-shm0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-shm0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libcairo2:amd64.\n", - "Preparing to unpack .../025-libcairo2_1.15.10-2ubuntu0.1_amd64.deb ...\n", - "Unpacking libcairo2:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Selecting previously unselected package libcroco3:amd64.\n", - "Preparing to unpack .../026-libcroco3_0.6.12-2_amd64.deb ...\n", - "Unpacking libcroco3:amd64 (0.6.12-2) ...\n", - "Selecting previously unselected package libthai-data.\n", - "Preparing to unpack .../027-libthai-data_0.1.27-2_all.deb ...\n", - "Unpacking libthai-data (0.1.27-2) ...\n", - "Selecting previously unselected package libdatrie1:amd64.\n", - "Preparing to unpack .../028-libdatrie1_0.2.10-7_amd64.deb ...\n", - "Unpacking libdatrie1:amd64 (0.2.10-7) ...\n", - "Selecting 
previously unselected package libthai0:amd64.\n", - "Preparing to unpack .../029-libthai0_0.1.27-2_amd64.deb ...\n", - "Unpacking libthai0:amd64 (0.1.27-2) ...\n", - "Selecting previously unselected package libpango-1.0-0:amd64.\n", - "Preparing to unpack .../030-libpango-1.0-0_1.40.14-1ubuntu0.1_amd64.deb ...\n", - "Unpacking libpango-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Selecting previously unselected package libgraphite2-3:amd64.\n", - "Preparing to unpack .../031-libgraphite2-3_1.3.11-2_amd64.deb ...\n", - "Unpacking libgraphite2-3:amd64 (1.3.11-2) ...\n", - "Selecting previously unselected package libharfbuzz0b:amd64.\n", - "Preparing to unpack .../032-libharfbuzz0b_1.7.2-1ubuntu1_amd64.deb ...\n", - "Unpacking libharfbuzz0b:amd64 (1.7.2-1ubuntu1) ...\n", - "Selecting previously unselected package libpangoft2-1.0-0:amd64.\n", - "Preparing to unpack .../033-libpangoft2-1.0-0_1.40.14-1ubuntu0.1_amd64.deb ...\n", - "Unpacking libpangoft2-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Selecting previously unselected package libpangocairo-1.0-0:amd64.\n", - "Preparing to unpack .../034-libpangocairo-1.0-0_1.40.14-1ubuntu0.1_amd64.deb ...\n", - "Unpacking libpangocairo-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Selecting previously unselected package librsvg2-2:amd64.\n", - "Preparing to unpack .../035-librsvg2-2_2.40.20-2_amd64.deb ...\n", - "Unpacking librsvg2-2:amd64 (2.40.20-2) ...\n", - "Selecting previously unselected package librsvg2-common:amd64.\n", - "Preparing to unpack .../036-librsvg2-common_2.40.20-2_amd64.deb ...\n", - "Unpacking librsvg2-common:amd64 (2.40.20-2) ...\n", - "Selecting previously unselected package humanity-icon-theme.\n", - "Preparing to unpack .../037-humanity-icon-theme_0.6.15_all.deb ...\n", - "Unpacking humanity-icon-theme (0.6.15) ...\n", - "Selecting previously unselected package ubuntu-mono.\n", - "Preparing to unpack 
.../038-ubuntu-mono_16.10+18.04.20181005-0ubuntu1_all.deb ...\n", - "Unpacking ubuntu-mono (16.10+18.04.20181005-0ubuntu1) ...\n", - "Selecting previously unselected package adwaita-icon-theme.\n", - "Preparing to unpack .../039-adwaita-icon-theme_3.28.0-1ubuntu1_all.deb ...\n", - "Unpacking adwaita-icon-theme (3.28.0-1ubuntu1) ...\n", - "Selecting previously unselected package libsigsegv2:amd64.\n", - "Preparing to unpack .../040-libsigsegv2_2.12-1_amd64.deb ...\n", - "Unpacking libsigsegv2:amd64 (2.12-1) ...\n", - "Selecting previously unselected package m4.\n", - "Preparing to unpack .../041-m4_1.4.18-1_amd64.deb ...\n", - "Unpacking m4 (1.4.18-1) ...\n", - "Selecting previously unselected package autoconf.\n", - "Preparing to unpack .../042-autoconf_2.69-11_all.deb ...\n", - "Unpacking autoconf (2.69-11) ...\n", - "Selecting previously unselected package autotools-dev.\n", - "Preparing to unpack .../043-autotools-dev_20180224.1_all.deb ...\n", - "Unpacking autotools-dev (20180224.1) ...\n", - "Selecting previously unselected package automake.\n", - "Preparing to unpack .../044-automake_1%3a1.15.1-3ubuntu2_all.deb ...\n", - "Unpacking automake (1:1.15.1-3ubuntu2) ...\n", - "Selecting previously unselected package autopoint.\n", - "Preparing to unpack .../045-autopoint_0.19.8.1-6ubuntu0.3_all.deb ...\n", - "Unpacking autopoint (0.19.8.1-6ubuntu0.3) ...\n", - "Selecting previously unselected package libtool.\n", - "Preparing to unpack .../046-libtool_2.4.6-2_all.deb ...\n", - "Unpacking libtool (2.4.6-2) ...\n", - "Selecting previously unselected package dh-autoreconf.\n", - "Preparing to unpack .../047-dh-autoreconf_17_all.deb ...\n", - "Unpacking dh-autoreconf (17) ...\n", - "Selecting previously unselected package libarchive-zip-perl.\n", - "Preparing to unpack .../048-libarchive-zip-perl_1.60-1ubuntu0.1_all.deb ...\n", - "Unpacking libarchive-zip-perl (1.60-1ubuntu0.1) ...\n", - "Selecting previously unselected package libfile-stripnondeterminism-perl.\n", - 
"Preparing to unpack .../049-libfile-stripnondeterminism-perl_0.040-1.1~build1_all.deb ...\n", - "Unpacking libfile-stripnondeterminism-perl (0.040-1.1~build1) ...\n", - "Selecting previously unselected package libtimedate-perl.\n", - "Preparing to unpack .../050-libtimedate-perl_2.3000-2_all.deb ...\n", - "Unpacking libtimedate-perl (2.3000-2) ...\n", - "Selecting previously unselected package dh-strip-nondeterminism.\n", - "Preparing to unpack .../051-dh-strip-nondeterminism_0.040-1.1~build1_all.deb ...\n", - "Unpacking dh-strip-nondeterminism (0.040-1.1~build1) ...\n", - "Selecting previously unselected package gettext.\n", - "Preparing to unpack .../052-gettext_0.19.8.1-6ubuntu0.3_amd64.deb ...\n", - "Unpacking gettext (0.19.8.1-6ubuntu0.3) ...\n", - "Selecting previously unselected package intltool-debian.\n", - "Preparing to unpack .../053-intltool-debian_0.35.0+20060710.4_all.deb ...\n", - "Unpacking intltool-debian (0.35.0+20060710.4) ...\n", - "Selecting previously unselected package po-debconf.\n", - "Preparing to unpack .../054-po-debconf_1.0.20_all.deb ...\n", - "Unpacking po-debconf (1.0.20) ...\n", - "Selecting previously unselected package debhelper.\n", - "Preparing to unpack .../055-debhelper_11.1.6ubuntu2_all.deb ...\n", - "Unpacking debhelper (11.1.6ubuntu2) ...\n", - "Selecting previously unselected package fp-units-rtl-3.0.4:amd64.\n", - "Preparing to unpack .../056-fp-units-rtl-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-rtl-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-compiler-3.0.4:amd64.\n", - "Preparing to unpack .../057-fp-compiler-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-compiler-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-docs-3.0.4.\n", - "Preparing to unpack .../058-fp-docs-3.0.4_3.0.4+dfsg-18ubuntu2_all.deb ...\n", - "Unpacking fp-docs-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously 
unselected package fp-ide-3.0.4.\n", - "Preparing to unpack .../059-fp-ide-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-ide-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-base-3.0.4:amd64.\n", - "Preparing to unpack .../060-fp-units-base-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-base-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-db-3.0.4:amd64.\n", - "Preparing to unpack .../061-fp-units-db-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-db-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-fcl-3.0.4:amd64.\n", - "Preparing to unpack .../062-fp-units-fcl-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-fcl-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-fv-3.0.4:amd64.\n", - "Preparing to unpack .../063-fp-units-fv-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-fv-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-gfx-3.0.4:amd64.\n", - "Preparing to unpack .../064-fp-units-gfx-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-gfx-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package libgtk2.0-common.\n", - "Preparing to unpack .../065-libgtk2.0-common_2.24.32-1ubuntu1_all.deb ...\n", - "Unpacking libgtk2.0-common (2.24.32-1ubuntu1) ...\n", - "Selecting previously unselected package libatk1.0-data.\n", - "Preparing to unpack .../066-libatk1.0-data_2.28.1-1_all.deb ...\n", - "Unpacking libatk1.0-data (2.28.1-1) ...\n", - "Selecting previously unselected package libatk1.0-0:amd64.\n", - "Preparing to unpack .../067-libatk1.0-0_2.28.1-1_amd64.deb ...\n", - "Unpacking libatk1.0-0:amd64 (2.28.1-1) ...\n", - "Selecting previously unselected package libavahi-common-data:amd64.\n", - "Preparing to unpack 
.../068-libavahi-common-data_0.7-3.1ubuntu1.2_amd64.deb ...\n", - "Unpacking libavahi-common-data:amd64 (0.7-3.1ubuntu1.2) ...\n", - "Selecting previously unselected package libavahi-common3:amd64.\n", - "Preparing to unpack .../069-libavahi-common3_0.7-3.1ubuntu1.2_amd64.deb ...\n", - "Unpacking libavahi-common3:amd64 (0.7-3.1ubuntu1.2) ...\n", - "Selecting previously unselected package libavahi-client3:amd64.\n", - "Preparing to unpack .../070-libavahi-client3_0.7-3.1ubuntu1.2_amd64.deb ...\n", - "Unpacking libavahi-client3:amd64 (0.7-3.1ubuntu1.2) ...\n", - "Selecting previously unselected package libcups2:amd64.\n", - "Preparing to unpack .../071-libcups2_2.2.7-1ubuntu2.7_amd64.deb ...\n", - "Unpacking libcups2:amd64 (2.2.7-1ubuntu2.7) ...\n", - "Selecting previously unselected package libxcomposite1:amd64.\n", - "Preparing to unpack .../072-libxcomposite1_1%3a0.4.4-2_amd64.deb ...\n", - "Unpacking libxcomposite1:amd64 (1:0.4.4-2) ...\n", - "Selecting previously unselected package libxfixes3:amd64.\n", - "Preparing to unpack .../073-libxfixes3_1%3a5.0.3-1_amd64.deb ...\n", - "Unpacking libxfixes3:amd64 (1:5.0.3-1) ...\n", - "Selecting previously unselected package libxcursor1:amd64.\n", - "Preparing to unpack .../074-libxcursor1_1%3a1.1.15-1_amd64.deb ...\n", - "Unpacking libxcursor1:amd64 (1:1.1.15-1) ...\n", - "Selecting previously unselected package libxdamage1:amd64.\n", - "Preparing to unpack .../075-libxdamage1_1%3a1.1.4-3_amd64.deb ...\n", - "Unpacking libxdamage1:amd64 (1:1.1.4-3) ...\n", - "Selecting previously unselected package libxi6:amd64.\n", - "Preparing to unpack .../076-libxi6_2%3a1.7.9-1_amd64.deb ...\n", - "Unpacking libxi6:amd64 (2:1.7.9-1) ...\n", - "Selecting previously unselected package libxrandr2:amd64.\n", - "Preparing to unpack .../077-libxrandr2_2%3a1.5.1-1_amd64.deb ...\n", - "Unpacking libxrandr2:amd64 (2:1.5.1-1) ...\n", - "Selecting previously unselected package libgtk2.0-0:amd64.\n", - "Preparing to unpack 
.../078-libgtk2.0-0_2.24.32-1ubuntu1_amd64.deb ...\n", - "Unpacking libgtk2.0-0:amd64 (2.24.32-1ubuntu1) ...\n", - "Selecting previously unselected package gir1.2-atk-1.0:amd64.\n", - "Preparing to unpack .../079-gir1.2-atk-1.0_2.28.1-1_amd64.deb ...\n", - "Unpacking gir1.2-atk-1.0:amd64 (2.28.1-1) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Selecting previously unselected package gir1.2-freedesktop:amd64.\n", - "Preparing to unpack .../080-gir1.2-freedesktop_1.56.1-1_amd64.deb ...\n", - "Unpacking gir1.2-freedesktop:amd64 (1.56.1-1) ...\n", - "Selecting previously unselected package gir1.2-gdkpixbuf-2.0:amd64.\n", - "Preparing to unpack .../081-gir1.2-gdkpixbuf-2.0_2.36.11-2_amd64.deb ...\n", - "Unpacking gir1.2-gdkpixbuf-2.0:amd64 (2.36.11-2) ...\n", - "Selecting previously unselected package libpangoxft-1.0-0:amd64.\n", - "Preparing to unpack .../082-libpangoxft-1.0-0_1.40.14-1ubuntu0.1_amd64.deb ...\n", - "Unpacking libpangoxft-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Selecting previously unselected package gir1.2-pango-1.0:amd64.\n", - "Preparing to unpack .../083-gir1.2-pango-1.0_1.40.14-1ubuntu0.1_amd64.deb ...\n", - "Unpacking gir1.2-pango-1.0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Selecting previously unselected package gir1.2-gtk-2.0.\n", - "Preparing to unpack .../084-gir1.2-gtk-2.0_2.24.32-1ubuntu1_amd64.deb ...\n", - "Unpacking gir1.2-gtk-2.0 (2.24.32-1ubuntu1) ...\n", - "Selecting previously unselected package libglib2.0-bin.\n", - "Preparing to unpack .../085-libglib2.0-bin_2.56.4-0ubuntu0.18.04.6_amd64.deb ...\n", - "Unpacking libglib2.0-bin (2.56.4-0ubuntu0.18.04.6) ...\n", - "Selecting previously unselected package libglib2.0-dev-bin.\n", - "Preparing to unpack .../086-libglib2.0-dev-bin_2.56.4-0ubuntu0.18.04.6_amd64.deb ...\n", - "Unpacking libglib2.0-dev-bin (2.56.4-0ubuntu0.18.04.6) ...\n", - "Selecting previously unselected package libpcre16-3:amd64.\n", - "Preparing to unpack 
.../087-libpcre16-3_2%3a8.39-9_amd64.deb ...\n", - "Unpacking libpcre16-3:amd64 (2:8.39-9) ...\n", - "Selecting previously unselected package libpcre32-3:amd64.\n", - "Preparing to unpack .../088-libpcre32-3_2%3a8.39-9_amd64.deb ...\n", - "Unpacking libpcre32-3:amd64 (2:8.39-9) ...\n", - "Selecting previously unselected package libpcrecpp0v5:amd64.\n", - "Preparing to unpack .../089-libpcrecpp0v5_2%3a8.39-9_amd64.deb ...\n", - "Unpacking libpcrecpp0v5:amd64 (2:8.39-9) ...\n", - "Selecting previously unselected package libpcre3-dev:amd64.\n", - "Preparing to unpack .../090-libpcre3-dev_2%3a8.39-9_amd64.deb ...\n", - "Unpacking libpcre3-dev:amd64 (2:8.39-9) ...\n", - "Selecting previously unselected package libglib2.0-dev:amd64.\n", - "Preparing to unpack .../091-libglib2.0-dev_2.56.4-0ubuntu0.18.04.6_amd64.deb ...\n", - "Unpacking libglib2.0-dev:amd64 (2.56.4-0ubuntu0.18.04.6) ...\n", - "Selecting previously unselected package xorg-sgml-doctools.\n", - "Preparing to unpack .../092-xorg-sgml-doctools_1%3a1.11-1_all.deb ...\n", - "Unpacking xorg-sgml-doctools (1:1.11-1) ...\n", - "Selecting previously unselected package x11proto-dev.\n", - "Preparing to unpack .../093-x11proto-dev_2018.4-4_all.deb ...\n", - "Unpacking x11proto-dev (2018.4-4) ...\n", - "Selecting previously unselected package x11proto-core-dev.\n", - "Preparing to unpack .../094-x11proto-core-dev_2018.4-4_all.deb ...\n", - "Unpacking x11proto-core-dev (2018.4-4) ...\n", - "Selecting previously unselected package libxau-dev:amd64.\n", - "Preparing to unpack .../095-libxau-dev_1%3a1.0.8-1_amd64.deb ...\n", - "Unpacking libxau-dev:amd64 (1:1.0.8-1) ...\n", - "Selecting previously unselected package libxdmcp-dev:amd64.\n", - "Preparing to unpack .../096-libxdmcp-dev_1%3a1.1.2-3_amd64.deb ...\n", - "Unpacking libxdmcp-dev:amd64 (1:1.1.2-3) ...\n", - "Selecting previously unselected package x11proto-input-dev.\n", - "Preparing to unpack .../097-x11proto-input-dev_2018.4-4_all.deb ...\n", - "Unpacking 
x11proto-input-dev (2018.4-4) ...\n", - "Selecting previously unselected package xtrans-dev.\n", - "Preparing to unpack .../098-xtrans-dev_1.3.5-1_all.deb ...\n", - "Unpacking xtrans-dev (1.3.5-1) ...\n", - "Selecting previously unselected package libpthread-stubs0-dev:amd64.\n", - "Preparing to unpack .../099-libpthread-stubs0-dev_0.3-4_amd64.deb ...\n", - "Unpacking libpthread-stubs0-dev:amd64 (0.3-4) ...\n", - "Selecting previously unselected package libxcb1-dev:amd64.\n", - "Preparing to unpack .../100-libxcb1-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb1-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libx11-dev:amd64.\n", - "Preparing to unpack .../101-libx11-dev_2%3a1.6.4-3ubuntu0.2_amd64.deb ...\n", - "Unpacking libx11-dev:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - "Selecting previously unselected package libgdk-pixbuf2.0-dev.\n", - "Preparing to unpack .../102-libgdk-pixbuf2.0-dev_2.36.11-2_amd64.deb ...\n", - "Unpacking libgdk-pixbuf2.0-dev (2.36.11-2) ...\n", - "Selecting previously unselected package libcairo-gobject2:amd64.\n", - "Preparing to unpack .../103-libcairo-gobject2_1.15.10-2ubuntu0.1_amd64.deb ...\n", - "Unpacking libcairo-gobject2:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Selecting previously unselected package libcairo-script-interpreter2:amd64.\n", - "Preparing to unpack .../104-libcairo-script-interpreter2_1.15.10-2ubuntu0.1_amd64.deb ...\n", - "Unpacking libcairo-script-interpreter2:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Selecting previously unselected package libfontconfig1-dev:amd64.\n", - "Preparing to unpack .../105-libfontconfig1-dev_2.12.6-0ubuntu2_amd64.deb ...\n", - "Unpacking libfontconfig1-dev:amd64 (2.12.6-0ubuntu2) ...\n", - "Selecting previously unselected package libxrender-dev:amd64.\n", - "Preparing to unpack .../106-libxrender-dev_1%3a0.9.10-1_amd64.deb ...\n", - "Unpacking libxrender-dev:amd64 (1:0.9.10-1) ...\n", - "Selecting previously unselected package x11proto-xext-dev.\n", - 
"Preparing to unpack .../107-x11proto-xext-dev_2018.4-4_all.deb ...\n", - "Unpacking x11proto-xext-dev (2018.4-4) ...\n", - "Selecting previously unselected package libxext-dev:amd64.\n", - "Preparing to unpack .../108-libxext-dev_2%3a1.3.3-1_amd64.deb ...\n", - "Unpacking libxext-dev:amd64 (2:1.3.3-1) ...\n", - "Selecting previously unselected package libice-dev:amd64.\n", - "Preparing to unpack .../109-libice-dev_2%3a1.0.9-2_amd64.deb ...\n", - "Unpacking libice-dev:amd64 (2:1.0.9-2) ...\n", - "Selecting previously unselected package libsm-dev:amd64.\n", - "Preparing to unpack .../110-libsm-dev_2%3a1.2.2-1_amd64.deb ...\n", - "Unpacking libsm-dev:amd64 (2:1.2.2-1) ...\n", - "Selecting previously unselected package libpixman-1-dev:amd64.\n", - "Preparing to unpack .../111-libpixman-1-dev_0.34.0-2_amd64.deb ...\n", - "Unpacking libpixman-1-dev:amd64 (0.34.0-2) ...\n", - "Selecting previously unselected package libxcb-render0-dev:amd64.\n", - "Preparing to unpack .../112-libxcb-render0-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-render0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-shm0-dev:amd64.\n", - "Preparing to unpack .../113-libxcb-shm0-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-shm0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libcairo2-dev:amd64.\n", - "Preparing to unpack .../114-libcairo2-dev_1.15.10-2ubuntu0.1_amd64.deb ...\n", - "Unpacking libcairo2-dev:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Selecting previously unselected package libharfbuzz-icu0:amd64.\n", - "Preparing to unpack .../115-libharfbuzz-icu0_1.7.2-1ubuntu1_amd64.deb ...\n", - "Unpacking libharfbuzz-icu0:amd64 (1.7.2-1ubuntu1) ...\n", - "Selecting previously unselected package libharfbuzz-gobject0:amd64.\n", - "Preparing to unpack .../116-libharfbuzz-gobject0_1.7.2-1ubuntu1_amd64.deb ...\n", - "Unpacking libharfbuzz-gobject0:amd64 (1.7.2-1ubuntu1) ...\n", - "Selecting previously 
unselected package gir1.2-harfbuzz-0.0:amd64.\n", - "Preparing to unpack .../117-gir1.2-harfbuzz-0.0_1.7.2-1ubuntu1_amd64.deb ...\n", - "Unpacking gir1.2-harfbuzz-0.0:amd64 (1.7.2-1ubuntu1) ...\n", - "Selecting previously unselected package libgraphite2-dev:amd64.\n", - "Preparing to unpack .../118-libgraphite2-dev_1.3.11-2_amd64.deb ...\n", - "Unpacking libgraphite2-dev:amd64 (1.3.11-2) ...\n", - "Selecting previously unselected package libicu-le-hb0:amd64.\n", - "Preparing to unpack .../119-libicu-le-hb0_1.0.3+git161113-4_amd64.deb ...\n", - "Unpacking libicu-le-hb0:amd64 (1.0.3+git161113-4) ...\n", - "Selecting previously unselected package libiculx60:amd64.\n", - "Preparing to unpack .../120-libiculx60_60.2-3ubuntu3.1_amd64.deb ...\n", - "Unpacking libiculx60:amd64 (60.2-3ubuntu3.1) ...\n", - "Selecting previously unselected package icu-devtools.\n", - "Preparing to unpack .../121-icu-devtools_60.2-3ubuntu3.1_amd64.deb ...\n", - "Unpacking icu-devtools (60.2-3ubuntu3.1) ...\n", - "Selecting previously unselected package libicu-le-hb-dev:amd64.\n", - "Preparing to unpack .../122-libicu-le-hb-dev_1.0.3+git161113-4_amd64.deb ...\n", - "Unpacking libicu-le-hb-dev:amd64 (1.0.3+git161113-4) ...\n", - "Selecting previously unselected package libicu-dev.\n", - "Preparing to unpack .../123-libicu-dev_60.2-3ubuntu3.1_amd64.deb ...\n", - "Unpacking libicu-dev (60.2-3ubuntu3.1) ...\n", - "Selecting previously unselected package libharfbuzz-dev:amd64.\n", - "Preparing to unpack .../124-libharfbuzz-dev_1.7.2-1ubuntu1_amd64.deb ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Unpacking libharfbuzz-dev:amd64 (1.7.2-1ubuntu1) ...\n", - "Selecting previously unselected package libxft-dev.\n", - "Preparing to unpack .../125-libxft-dev_2.3.2-1_amd64.deb ...\n", - "Unpacking libxft-dev (2.3.2-1) ...\n", - "Selecting previously unselected package libpango1.0-dev.\n", - "Preparing to unpack .../126-libpango1.0-dev_1.40.14-1ubuntu0.1_amd64.deb ...\n", 
- "Unpacking libpango1.0-dev (1.40.14-1ubuntu0.1) ...\n", - "Selecting previously unselected package libatk1.0-dev:amd64.\n", - "Preparing to unpack .../127-libatk1.0-dev_2.28.1-1_amd64.deb ...\n", - "Unpacking libatk1.0-dev:amd64 (2.28.1-1) ...\n", - "Selecting previously unselected package x11proto-xinerama-dev.\n", - "Preparing to unpack .../128-x11proto-xinerama-dev_2018.4-4_all.deb ...\n", - "Unpacking x11proto-xinerama-dev (2018.4-4) ...\n", - "Selecting previously unselected package libxinerama-dev:amd64.\n", - "Preparing to unpack .../129-libxinerama-dev_2%3a1.1.3-1_amd64.deb ...\n", - "Unpacking libxinerama-dev:amd64 (2:1.1.3-1) ...\n", - "Selecting previously unselected package x11proto-fixes-dev.\n", - "Preparing to unpack .../130-x11proto-fixes-dev_1%3a2018.4-4_all.deb ...\n", - "Unpacking x11proto-fixes-dev (1:2018.4-4) ...\n", - "Selecting previously unselected package libxfixes-dev:amd64.\n", - "Preparing to unpack .../131-libxfixes-dev_1%3a5.0.3-1_amd64.deb ...\n", - "Unpacking libxfixes-dev:amd64 (1:5.0.3-1) ...\n", - "Selecting previously unselected package libxi-dev:amd64.\n", - "Preparing to unpack .../132-libxi-dev_2%3a1.7.9-1_amd64.deb ...\n", - "Unpacking libxi-dev:amd64 (2:1.7.9-1) ...\n", - "Selecting previously unselected package x11proto-randr-dev.\n", - "Preparing to unpack .../133-x11proto-randr-dev_2018.4-4_all.deb ...\n", - "Unpacking x11proto-randr-dev (2018.4-4) ...\n", - "Selecting previously unselected package libxrandr-dev:amd64.\n", - "Preparing to unpack .../134-libxrandr-dev_2%3a1.5.1-1_amd64.deb ...\n", - "Unpacking libxrandr-dev:amd64 (2:1.5.1-1) ...\n", - "Selecting previously unselected package libxcursor-dev:amd64.\n", - "Preparing to unpack .../135-libxcursor-dev_1%3a1.1.15-1_amd64.deb ...\n", - "Unpacking libxcursor-dev:amd64 (1:1.1.15-1) ...\n", - "Selecting previously unselected package x11proto-composite-dev.\n", - "Preparing to unpack .../136-x11proto-composite-dev_1%3a2018.4-4_all.deb ...\n", - "Unpacking 
x11proto-composite-dev (1:2018.4-4) ...\n", - "Selecting previously unselected package libxcomposite-dev:amd64.\n", - "Preparing to unpack .../137-libxcomposite-dev_1%3a0.4.4-2_amd64.deb ...\n", - "Unpacking libxcomposite-dev:amd64 (1:0.4.4-2) ...\n", - "Selecting previously unselected package x11proto-damage-dev.\n", - "Preparing to unpack .../138-x11proto-damage-dev_1%3a2018.4-4_all.deb ...\n", - "Unpacking x11proto-damage-dev (1:2018.4-4) ...\n", - "Selecting previously unselected package libxdamage-dev:amd64.\n", - "Preparing to unpack .../139-libxdamage-dev_1%3a1.1.4-3_amd64.deb ...\n", - "Unpacking libxdamage-dev:amd64 (1:1.1.4-3) ...\n", - "Selecting previously unselected package libxml2-utils.\n", - "Preparing to unpack .../140-libxml2-utils_2.9.4+dfsg1-6.1ubuntu1.3_amd64.deb ...\n", - "Unpacking libxml2-utils (2.9.4+dfsg1-6.1ubuntu1.3) ...\n", - "Selecting previously unselected package libgtk2.0-dev.\n", - "Preparing to unpack .../141-libgtk2.0-dev_2.24.32-1ubuntu1_amd64.deb ...\n", - "Unpacking libgtk2.0-dev (2.24.32-1ubuntu1) ...\n", - "Selecting previously unselected package fp-units-gtk2-3.0.4:amd64.\n", - "Preparing to unpack .../142-fp-units-gtk2-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-gtk2-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-math-3.0.4:amd64.\n", - "Preparing to unpack .../143-fp-units-math-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-math-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-misc-3.0.4:amd64.\n", - "Preparing to unpack .../144-fp-units-misc-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-misc-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-multimedia-3.0.4:amd64.\n", - "Preparing to unpack .../145-fp-units-multimedia-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-multimedia-3.0.4:amd64 
(3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-net-3.0.4:amd64.\n", - "Preparing to unpack .../146-fp-units-net-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-net-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fpc-source-3.0.4.\n", - "Preparing to unpack .../147-fpc-source-3.0.4_3.0.4+dfsg-18ubuntu2_all.deb ...\n", - "Unpacking fpc-source-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-utils-3.0.4.\n", - "Preparing to unpack .../148-fp-utils-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-utils-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fpc-3.0.4.\n", - "Preparing to unpack .../149-fpc-3.0.4_3.0.4+dfsg-18ubuntu2_all.deb ...\n", - "Unpacking fpc-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fpc.\n", - "Preparing to unpack .../150-fpc_3.0.4+dfsg-18ubuntu2_all.deb ...\n", - "Unpacking fpc (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fpc-source.\n", - "Preparing to unpack .../151-fpc-source_3.0.4+dfsg-18ubuntu2_all.deb ...\n", - "Unpacking fpc-source (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package libdw1:amd64.\n", - "Preparing to unpack .../152-libdw1_0.170-0.4ubuntu0.1_amd64.deb ...\n", - "Unpacking libdw1:amd64 (0.170-0.4ubuntu0.1) ...\n", - "Selecting previously unselected package libbabeltrace1:amd64.\n", - "Preparing to unpack .../153-libbabeltrace1_1.5.5-1_amd64.deb ...\n", - "Unpacking libbabeltrace1:amd64 (1.5.5-1) ...\n", - "Selecting previously unselected package gdb.\n", - "Preparing to unpack .../154-gdb_8.1-0ubuntu3.2_amd64.deb ...\n", - "Unpacking gdb (8.1-0ubuntu3.2) ...\n", - "Selecting previously unselected package gdbserver.\n", - "Preparing to unpack .../155-gdbserver_8.1-0ubuntu3.2_amd64.deb ...\n", - "Unpacking gdbserver (8.1-0ubuntu3.2) ...\n", - "Selecting previously unselected 
package lazarus-ide-1.8.\n", - "Preparing to unpack .../156-lazarus-ide-1.8_1.8.2+dfsg-3_amd64.deb ...\n", - "Unpacking lazarus-ide-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lazarus-ide-gtk2-1.8.\n", - "Preparing to unpack .../157-lazarus-ide-gtk2-1.8_1.8.2+dfsg-3_amd64.deb ...\n", - "Unpacking lazarus-ide-gtk2-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lazarus-ide.\n", - "Preparing to unpack .../158-lazarus-ide_1.8.2+dfsg-3_all.deb ...\n", - "Unpacking lazarus-ide (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lazarus-src-1.8.\n", - "Preparing to unpack .../159-lazarus-src-1.8_1.8.2+dfsg-3_all.deb ...\n", - "Unpacking lazarus-src-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lcl-nogui-1.8.\n", - "Preparing to unpack .../160-lcl-nogui-1.8_1.8.2+dfsg-3_amd64.deb ...\n", - "Unpacking lcl-nogui-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lcl-gtk2-1.8.\n", - "Preparing to unpack .../161-lcl-gtk2-1.8_1.8.2+dfsg-3_amd64.deb ...\n", - "Unpacking lcl-gtk2-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lcl-units-1.8.\n", - "Preparing to unpack .../162-lcl-units-1.8_1.8.2+dfsg-3_amd64.deb ...\n", - "Unpacking lcl-units-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lcl-utils-1.8.\n", - "Preparing to unpack .../163-lcl-utils-1.8_1.8.2+dfsg-3_amd64.deb ...\n", - "Unpacking lcl-utils-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lcl-1.8.\n", - "Preparing to unpack .../164-lcl-1.8_1.8.2+dfsg-3_amd64.deb ...\n", - "Unpacking lcl-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lazarus-1.8.\n", - "Preparing to unpack .../165-lazarus-1.8_1.8.2+dfsg-3_all.deb ...\n", - "Unpacking lazarus-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lazarus.\n", - "Preparing to unpack .../166-lazarus_1.8.2+dfsg-3_all.deb ...\n", - "Unpacking lazarus (1.8.2+dfsg-3) 
...\n", - "Selecting previously unselected package lazarus-doc-1.8.\n", - "Preparing to unpack .../167-lazarus-doc-1.8_1.8.2+dfsg-3_all.deb ...\n", - "Unpacking lazarus-doc-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package liba52-0.7.4:amd64.\n", - "Preparing to unpack .../168-liba52-0.7.4_0.7.4-19_amd64.deb ...\n", - "Unpacking liba52-0.7.4:amd64 (0.7.4-19) ...\n", - "Selecting previously unselected package liba52-0.7.4-dev.\n", - "Preparing to unpack .../169-liba52-0.7.4-dev_0.7.4-19_amd64.deb ...\n", - "Unpacking liba52-0.7.4-dev (0.7.4-19) ...\n", - "Selecting previously unselected package libapr1:amd64.\n", - "Preparing to unpack .../170-libapr1_1.6.3-2_amd64.deb ...\n", - "Unpacking libapr1:amd64 (1.6.3-2) ...\n", - "Selecting previously unselected package libaprutil1:amd64.\n", - "Preparing to unpack .../171-libaprutil1_1.6.1-2_amd64.deb ...\n", - "Unpacking libaprutil1:amd64 (1.6.1-2) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Selecting previously unselected package libarchive-cpio-perl.\n", - "Preparing to unpack .../172-libarchive-cpio-perl_0.10-1_all.deb ...\n", - "Unpacking libarchive-cpio-perl (0.10-1) ...\n", - "Selecting previously unselected package libasound2-data.\n", - "Preparing to unpack .../173-libasound2-data_1.1.3-5ubuntu0.4_all.deb ...\n", - "Unpacking libasound2-data (1.1.3-5ubuntu0.4) ...\n", - "Selecting previously unselected package libasound2:amd64.\n", - "Preparing to unpack .../174-libasound2_1.1.3-5ubuntu0.4_amd64.deb ...\n", - "Unpacking libasound2:amd64 (1.1.3-5ubuntu0.4) ...\n", - "Selecting previously unselected package libasound2-dev:amd64.\n", - "Preparing to unpack .../175-libasound2-dev_1.1.3-5ubuntu0.4_amd64.deb ...\n", - "Unpacking libasound2-dev:amd64 (1.1.3-5ubuntu0.4) ...\n", - "Selecting previously unselected package libasyncns0:amd64.\n", - "Preparing to unpack .../176-libasyncns0_0.8-6_amd64.deb ...\n", - "Unpacking libasyncns0:amd64 (0.8-6) ...\n", - 
"Selecting previously unselected package libcaca0:amd64.\n", - "Preparing to unpack .../177-libcaca0_0.99.beta19-2ubuntu0.18.04.1_amd64.deb ...\n", - "Unpacking libcaca0:amd64 (0.99.beta19-2ubuntu0.18.04.1) ...\n", - "Selecting previously unselected package libslang2-dev:amd64.\n", - "Preparing to unpack .../178-libslang2-dev_2.3.1a-3ubuntu1_amd64.deb ...\n", - "Unpacking libslang2-dev:amd64 (2.3.1a-3ubuntu1) ...\n", - "Selecting previously unselected package libcaca-dev.\n", - "Preparing to unpack .../179-libcaca-dev_0.99.beta19-2ubuntu0.18.04.1_amd64.deb ...\n", - "Unpacking libcaca-dev (0.99.beta19-2ubuntu0.18.04.1) ...\n", - "Selecting previously unselected package libcdt5.\n", - "Preparing to unpack .../180-libcdt5_2.40.1-2_amd64.deb ...\n", - "Unpacking libcdt5 (2.40.1-2) ...\n", - "Selecting previously unselected package libcgraph6.\n", - "Preparing to unpack .../181-libcgraph6_2.40.1-2_amd64.deb ...\n", - "Unpacking libcgraph6 (2.40.1-2) ...\n", - "Selecting previously unselected package libdrm-amdgpu1:amd64.\n", - "Preparing to unpack .../182-libdrm-amdgpu1_2.4.99-1ubuntu1~18.04.2_amd64.deb ...\n", - "Unpacking libdrm-amdgpu1:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Selecting previously unselected package libpciaccess0:amd64.\n", - "Preparing to unpack .../183-libpciaccess0_0.14-1_amd64.deb ...\n", - "Unpacking libpciaccess0:amd64 (0.14-1) ...\n", - "Selecting previously unselected package libdrm-intel1:amd64.\n", - "Preparing to unpack .../184-libdrm-intel1_2.4.99-1ubuntu1~18.04.2_amd64.deb ...\n", - "Unpacking libdrm-intel1:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Selecting previously unselected package libdrm-radeon1:amd64.\n", - "Preparing to unpack .../185-libdrm-radeon1_2.4.99-1ubuntu1~18.04.2_amd64.deb ...\n", - "Unpacking libdrm-radeon1:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Selecting previously unselected package libdrm-nouveau2:amd64.\n", - "Preparing to unpack .../186-libdrm-nouveau2_2.4.99-1ubuntu1~18.04.2_amd64.deb ...\n", - "Unpacking 
libdrm-nouveau2:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Selecting previously unselected package libdrm-dev:amd64.\n", - "Preparing to unpack .../187-libdrm-dev_2.4.99-1ubuntu1~18.04.2_amd64.deb ...\n", - "Unpacking libdrm-dev:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Selecting previously unselected package libwayland-server0:amd64.\n", - "Preparing to unpack .../188-libwayland-server0_1.16.0-1ubuntu1.1~18.04.3_amd64.deb ...\n", - "Unpacking libwayland-server0:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Selecting previously unselected package libgbm1:amd64.\n", - "Preparing to unpack .../189-libgbm1_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking libgbm1:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected package libglapi-mesa:amd64.\n", - "Preparing to unpack .../190-libglapi-mesa_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking libglapi-mesa:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected package libwayland-client0:amd64.\n", - "Preparing to unpack .../191-libwayland-client0_1.16.0-1ubuntu1.1~18.04.3_amd64.deb ...\n", - "Unpacking libwayland-client0:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Selecting previously unselected package libx11-xcb1:amd64.\n", - "Preparing to unpack .../192-libx11-xcb1_2%3a1.6.4-3ubuntu0.2_amd64.deb ...\n", - "Unpacking libx11-xcb1:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - "Selecting previously unselected package libxcb-dri2-0:amd64.\n", - "Preparing to unpack .../193-libxcb-dri2-0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-dri2-0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-dri3-0:amd64.\n", - "Preparing to unpack .../194-libxcb-dri3-0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-dri3-0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-present0:amd64.\n", - "Preparing to unpack .../195-libxcb-present0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-present0:amd64 
(1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-sync1:amd64.\n", - "Preparing to unpack .../196-libxcb-sync1_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-sync1:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-xfixes0:amd64.\n", - "Preparing to unpack .../197-libxcb-xfixes0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-xfixes0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxshmfence1:amd64.\n", - "Preparing to unpack .../198-libxshmfence1_1.3-1_amd64.deb ...\n", - "Unpacking libxshmfence1:amd64 (1.3-1) ...\n", - "Selecting previously unselected package libegl-mesa0:amd64.\n", - "Preparing to unpack .../199-libegl-mesa0_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking libegl-mesa0:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected package libflac8:amd64.\n", - "Preparing to unpack .../200-libflac8_1.3.2-1_amd64.deb ...\n", - "Unpacking libflac8:amd64 (1.3.2-1) ...\n", - "Selecting previously unselected package libogg-dev:amd64.\n", - "Preparing to unpack .../201-libogg-dev_1.3.2-1_amd64.deb ...\n", - "Unpacking libogg-dev:amd64 (1.3.2-1) ...\n", - "Selecting previously unselected package libflac-dev:amd64.\n", - "Preparing to unpack .../202-libflac-dev_1.3.2-1_amd64.deb ...\n", - "Unpacking libflac-dev:amd64 (1.3.2-1) ...\n", - "Selecting previously unselected package libsamplerate0:amd64.\n", - "Preparing to unpack .../203-libsamplerate0_0.1.9-1_amd64.deb ...\n", - "Unpacking libsamplerate0:amd64 (0.1.9-1) ...\n", - "Selecting previously unselected package libjack-jackd2-0:amd64.\n", - "Preparing to unpack .../204-libjack-jackd2-0_1.9.12~dfsg-2_amd64.deb ...\n", - "Unpacking libjack-jackd2-0:amd64 (1.9.12~dfsg-2) ...\n", - "Selecting previously unselected package libvorbis0a:amd64.\n", - "Preparing to unpack .../205-libvorbis0a_1.3.5-4.2_amd64.deb ...\n", - "Unpacking libvorbis0a:amd64 (1.3.5-4.2) ...\n", - 
"Selecting previously unselected package libvorbisenc2:amd64.\n", - "Preparing to unpack .../206-libvorbisenc2_1.3.5-4.2_amd64.deb ...\n", - "Unpacking libvorbisenc2:amd64 (1.3.5-4.2) ...\n", - "Selecting previously unselected package libsndfile1:amd64.\n", - "Preparing to unpack .../207-libsndfile1_1.0.28-4ubuntu0.18.04.1_amd64.deb ...\n", - "Unpacking libsndfile1:amd64 (1.0.28-4ubuntu0.18.04.1) ...\n", - "Selecting previously unselected package libpulse0:amd64.\n", - "Preparing to unpack .../208-libpulse0_1%3a11.1-1ubuntu7.4_amd64.deb ...\n", - "Unpacking libpulse0:amd64 (1:11.1-1ubuntu7.4) ...\n", - "Selecting previously unselected package libfluidsynth1:amd64.\n", - "Preparing to unpack .../209-libfluidsynth1_1.1.9-1_amd64.deb ...\n", - "Unpacking libfluidsynth1:amd64 (1.1.9-1) ...\n", - "Selecting previously unselected package libxpm4:amd64.\n", - "Preparing to unpack .../210-libxpm4_1%3a3.5.12-1_amd64.deb ...\n", - "Unpacking libxpm4:amd64 (1:3.5.12-1) ...\n", - "Selecting previously unselected package libforms2.\n", - "Preparing to unpack .../211-libforms2_1.2.3-1.3_amd64.deb ...\n", - "Unpacking libforms2 (1.2.3-1.3) ...\n", - "Selecting previously unselected package libxpm-dev:amd64.\n", - "Preparing to unpack .../212-libxpm-dev_1%3a3.5.12-1_amd64.deb ...\n", - "Unpacking libxpm-dev:amd64 (1:3.5.12-1) ...\n", - "Selecting previously unselected package libforms-dev.\n", - "Preparing to unpack .../213-libforms-dev_1.2.3-1.3_amd64.deb ...\n", - "Unpacking libforms-dev (1.2.3-1.3) ...\n", - "Selecting previously unselected package libgail18:amd64.\n", - "Preparing to unpack .../214-libgail18_2.24.32-1ubuntu1_amd64.deb ...\n", - "Unpacking libgail18:amd64 (2.24.32-1ubuntu1) ...\n", - "Selecting previously unselected package libgail-common:amd64.\n", - "Preparing to unpack .../215-libgail-common_2.24.32-1ubuntu1_amd64.deb ...\n", - "Unpacking libgail-common:amd64 (2.24.32-1ubuntu1) ...\n", - "Selecting previously unselected package libwebp6:amd64.\n", - 
"Preparing to unpack .../216-libwebp6_0.6.1-2_amd64.deb ...\n", - "Unpacking libwebp6:amd64 (0.6.1-2) ...\n", - "Selecting previously unselected package libgd3:amd64.\n", - "Preparing to unpack .../217-libgd3_2.2.5-4ubuntu0.4_amd64.deb ...\n", - "Unpacking libgd3:amd64 (2.2.5-4ubuntu0.4) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Selecting previously unselected package libxt6:amd64.\n", - "Preparing to unpack .../218-libxt6_1%3a1.1.5-1_amd64.deb ...\n", - "Unpacking libxt6:amd64 (1:1.1.5-1) ...\n", - "Selecting previously unselected package libxt-dev:amd64.\n", - "Preparing to unpack .../219-libxt-dev_1%3a1.1.5-1_amd64.deb ...\n", - "Unpacking libxt-dev:amd64 (1:1.1.5-1) ...\n", - "Selecting previously unselected package libvpx5:amd64.\n", - "Preparing to unpack .../220-libvpx5_1.7.0-3ubuntu0.18.04.1_amd64.deb ...\n", - "Unpacking libvpx5:amd64 (1.7.0-3ubuntu0.18.04.1) ...\n", - "Selecting previously unselected package libvpx-dev:amd64.\n", - "Preparing to unpack .../221-libvpx-dev_1.7.0-3ubuntu0.18.04.1_amd64.deb ...\n", - "Unpacking libvpx-dev:amd64 (1.7.0-3ubuntu0.18.04.1) ...\n", - "Selecting previously unselected package libjbig-dev:amd64.\n", - "Preparing to unpack .../222-libjbig-dev_2.1-3.1build1_amd64.deb ...\n", - "Unpacking libjbig-dev:amd64 (2.1-3.1build1) ...\n", - "Selecting previously unselected package liblzma-dev:amd64.\n", - "Preparing to unpack .../223-liblzma-dev_5.2.2-1.3_amd64.deb ...\n", - "Unpacking liblzma-dev:amd64 (5.2.2-1.3) ...\n", - "Selecting previously unselected package libtiffxx5:amd64.\n", - "Preparing to unpack .../224-libtiffxx5_4.0.9-5ubuntu0.3_amd64.deb ...\n", - "Unpacking libtiffxx5:amd64 (4.0.9-5ubuntu0.3) ...\n", - "Selecting previously unselected package libtiff5-dev:amd64.\n", - "Preparing to unpack .../225-libtiff5-dev_4.0.9-5ubuntu0.3_amd64.deb ...\n", - "Unpacking libtiff5-dev:amd64 (4.0.9-5ubuntu0.3) ...\n", - "Selecting previously unselected package libtiff-dev.\n", - 
"Preparing to unpack .../226-libtiff-dev_4.0.9-5ubuntu0.3_amd64.deb ...\n", - "Unpacking libtiff-dev (4.0.9-5ubuntu0.3) ...\n", - "Selecting previously unselected package libgd-dev:amd64.\n", - "Preparing to unpack .../227-libgd-dev_2.2.5-4ubuntu0.4_amd64.deb ...\n", - "Unpacking libgd-dev:amd64 (2.2.5-4ubuntu0.4) ...\n", - "Selecting previously unselected package libgdk-pixbuf2.0-bin.\n", - "Preparing to unpack .../228-libgdk-pixbuf2.0-bin_2.36.11-2_amd64.deb ...\n", - "Unpacking libgdk-pixbuf2.0-bin (2.36.11-2) ...\n", - "Selecting previously unselected package libllvm9:amd64.\n", - "Preparing to unpack .../229-libllvm9_1%3a9-2~ubuntu18.04.2_amd64.deb ...\n", - "Unpacking libllvm9:amd64 (1:9-2~ubuntu18.04.2) ...\n", - "Selecting previously unselected package libsensors4:amd64.\n", - "Preparing to unpack .../230-libsensors4_1%3a3.4.0-4_amd64.deb ...\n", - "Unpacking libsensors4:amd64 (1:3.4.0-4) ...\n", - "Selecting previously unselected package libgl1-mesa-dri:amd64.\n", - "Preparing to unpack .../231-libgl1-mesa-dri_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking libgl1-mesa-dri:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected package libglvnd0:amd64.\n", - "Preparing to unpack .../232-libglvnd0_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libglvnd0:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libgles1:amd64.\n", - "Preparing to unpack .../233-libgles1_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libgles1:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libxcb-glx0:amd64.\n", - "Preparing to unpack .../234-libxcb-glx0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-glx0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libglx-mesa0:amd64.\n", - "Preparing to unpack .../235-libglx-mesa0_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking libglx-mesa0:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected 
package libglx0:amd64.\n", - "Preparing to unpack .../236-libglx0_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libglx0:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libgl1:amd64.\n", - "Preparing to unpack .../237-libgl1_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libgl1:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libglu1-mesa:amd64.\n", - "Preparing to unpack .../238-libglu1-mesa_9.0.0-2.1build1_amd64.deb ...\n", - "Unpacking libglu1-mesa:amd64 (9.0.0-2.1build1) ...\n", - "Selecting previously unselected package mesa-common-dev:amd64.\n", - "Preparing to unpack .../239-mesa-common-dev_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking mesa-common-dev:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected package libglvnd-core-dev:amd64.\n", - "Preparing to unpack .../240-libglvnd-core-dev_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libglvnd-core-dev:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libegl1:amd64.\n", - "Preparing to unpack .../241-libegl1_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libegl1:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libgles2:amd64.\n", - "Preparing to unpack .../242-libgles2_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libgles2:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libopengl0:amd64.\n", - "Preparing to unpack .../243-libopengl0_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libopengl0:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libglvnd-dev:amd64.\n", - "Preparing to unpack .../244-libglvnd-dev_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libglvnd-dev:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libx11-xcb-dev:amd64.\n", - "Preparing to unpack .../245-libx11-xcb-dev_2%3a1.6.4-3ubuntu0.2_amd64.deb ...\n", - "Unpacking libx11-xcb-dev:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - 
"Selecting previously unselected package libxcb-dri3-dev:amd64.\n", - "Preparing to unpack .../246-libxcb-dri3-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-dri3-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-randr0:amd64.\n", - "Preparing to unpack .../247-libxcb-randr0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-randr0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-randr0-dev:amd64.\n", - "Preparing to unpack .../248-libxcb-randr0-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-randr0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-shape0:amd64.\n", - "Preparing to unpack .../249-libxcb-shape0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-shape0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-shape0-dev:amd64.\n", - "Preparing to unpack .../250-libxcb-shape0-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-shape0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-xfixes0-dev:amd64.\n", - "Preparing to unpack .../251-libxcb-xfixes0-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-xfixes0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-sync-dev:amd64.\n", - "Preparing to unpack .../252-libxcb-sync-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-sync-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-present-dev:amd64.\n", - "Preparing to unpack .../253-libxcb-present-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-present-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxshmfence-dev:amd64.\n", - "Preparing to unpack .../254-libxshmfence-dev_1.3-1_amd64.deb ...\n", - "Unpacking libxshmfence-dev:amd64 (1.3-1) ...\n", - "Selecting previously unselected package 
libxcb-dri2-0-dev:amd64.\n", - "Preparing to unpack .../255-libxcb-dri2-0-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-dri2-0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-glx0-dev:amd64.\n", - "Preparing to unpack .../256-libxcb-glx0-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-glx0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package x11proto-xf86vidmode-dev.\n", - "Preparing to unpack .../257-x11proto-xf86vidmode-dev_2018.4-4_all.deb ...\n", - "Unpacking x11proto-xf86vidmode-dev (2018.4-4) ...\n", - "Selecting previously unselected package libxxf86vm-dev:amd64.\n", - "Preparing to unpack .../258-libxxf86vm-dev_1%3a1.1.4-1_amd64.deb ...\n", - "Unpacking libxxf86vm-dev:amd64 (1:1.1.4-1) ...\n", - "Selecting previously unselected package libgl1-mesa-dev:amd64.\n", - "Preparing to unpack .../259-libgl1-mesa-dev_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking libgl1-mesa-dev:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected package libglu1-mesa-dev:amd64.\n", - "Preparing to unpack .../260-libglu1-mesa-dev_9.0.0-2.1build1_amd64.deb ...\n", - "Unpacking libglu1-mesa-dev:amd64 (9.0.0-2.1build1) ...\n", - "Selecting previously unselected package libgmpxx4ldbl:amd64.\n", - "Preparing to unpack .../261-libgmpxx4ldbl_2%3a6.1.2+dfsg-2_amd64.deb ...\n", - "Unpacking libgmpxx4ldbl:amd64 (2:6.1.2+dfsg-2) ...\n", - "Selecting previously unselected package libgmp-dev:amd64.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Preparing to unpack .../262-libgmp-dev_2%3a6.1.2+dfsg-2_amd64.deb ...\n", - "Unpacking libgmp-dev:amd64 (2:6.1.2+dfsg-2) ...\n", - "Selecting previously unselected package libgts-0.7-5:amd64.\n", - "Preparing to unpack .../263-libgts-0.7-5_0.7.6+darcs121130-4_amd64.deb ...\n", - "Unpacking libgts-0.7-5:amd64 (0.7.6+darcs121130-4) ...\n", - "Selecting previously unselected package libltdl7:amd64.\n", 
- "Preparing to unpack .../264-libltdl7_2.4.6-2_amd64.deb ...\n", - "Unpacking libltdl7:amd64 (2.4.6-2) ...\n", - "Selecting previously unselected package libpathplan4.\n", - "Preparing to unpack .../265-libpathplan4_2.40.1-2_amd64.deb ...\n", - "Unpacking libpathplan4 (2.40.1-2) ...\n", - "Selecting previously unselected package libgvc6.\n", - "Preparing to unpack .../266-libgvc6_2.40.1-2_amd64.deb ...\n", - "Unpacking libgvc6 (2.40.1-2) ...\n", - "Selecting previously unselected package libgvpr2.\n", - "Preparing to unpack .../267-libgvpr2_2.40.1-2_amd64.deb ...\n", - "Unpacking libgvpr2 (2.40.1-2) ...\n", - "Selecting previously unselected package libxdot4.\n", - "Preparing to unpack .../268-libxdot4_2.40.1-2_amd64.deb ...\n", - "Unpacking libxdot4 (2.40.1-2) ...\n", - "Selecting previously unselected package liblab-gamut1.\n", - "Preparing to unpack .../269-liblab-gamut1_2.40.1-2_amd64.deb ...\n", - "Unpacking liblab-gamut1 (2.40.1-2) ...\n", - "Selecting previously unselected package libgvc6-plugins-gtk.\n", - "Preparing to unpack .../270-libgvc6-plugins-gtk_2.40.1-2_amd64.deb ...\n", - "Unpacking libgvc6-plugins-gtk (2.40.1-2) ...\n", - "Selecting previously unselected package libgraphviz-dev.\n", - "Preparing to unpack .../271-libgraphviz-dev_2.40.1-2_amd64.deb ...\n", - "Unpacking libgraphviz-dev (2.40.1-2) ...\n", - "Selecting previously unselected package libgtk2.0-bin.\n", - "Preparing to unpack .../272-libgtk2.0-bin_2.24.32-1ubuntu1_amd64.deb ...\n", - "Unpacking libgtk2.0-bin (2.24.32-1ubuntu1) ...\n", - "Selecting previously unselected package libgts-bin.\n", - "Preparing to unpack .../273-libgts-bin_0.7.6+darcs121130-4_amd64.deb ...\n", - "Unpacking libgts-bin (0.7.6+darcs121130-4) ...\n", - "Selecting previously unselected package libltdl-dev:amd64.\n", - "Preparing to unpack .../274-libltdl-dev_2.4.6-2_amd64.deb ...\n", - "Unpacking libltdl-dev:amd64 (2.4.6-2) ...\n", - "Selecting previously unselected package libmad0:amd64.\n", - "Preparing to 
unpack .../275-libmad0_0.15.1b-9ubuntu18.04.1_amd64.deb ...\n", - "Unpacking libmad0:amd64 (0.15.1b-9ubuntu18.04.1) ...\n", - "Selecting previously unselected package libmad0-dev.\n", - "Preparing to unpack .../276-libmad0-dev_0.15.1b-9ubuntu18.04.1_amd64.deb ...\n", - "Unpacking libmad0-dev (0.15.1b-9ubuntu18.04.1) ...\n", - "Selecting previously unselected package libsys-hostname-long-perl.\n", - "Preparing to unpack .../277-libsys-hostname-long-perl_1.5-1_all.deb ...\n", - "Unpacking libsys-hostname-long-perl (1.5-1) ...\n", - "Selecting previously unselected package libmail-sendmail-perl.\n", - "Preparing to unpack .../278-libmail-sendmail-perl_0.80-1_all.deb ...\n", - "Unpacking libmail-sendmail-perl (0.80-1) ...\n", - "Selecting previously unselected package libmikmod-config.\n", - "Preparing to unpack .../279-libmikmod-config_3.3.11.1-3_amd64.deb ...\n", - "Unpacking libmikmod-config (3.3.11.1-3) ...\n", - "Selecting previously unselected package libopenal-data.\n", - "Preparing to unpack .../280-libopenal-data_1%3a1.18.2-2_all.deb ...\n", - "Unpacking libopenal-data (1:1.18.2-2) ...\n", - "Selecting previously unselected package libsndio6.1:amd64.\n", - "Preparing to unpack .../281-libsndio6.1_1.1.0-3_amd64.deb ...\n", - "Unpacking libsndio6.1:amd64 (1.1.0-3) ...\n", - "Selecting previously unselected package libopenal1:amd64.\n", - "Preparing to unpack .../282-libopenal1_1%3a1.18.2-2_amd64.deb ...\n", - "Unpacking libopenal1:amd64 (1:1.18.2-2) ...\n", - "Selecting previously unselected package libwayland-cursor0:amd64.\n", - "Preparing to unpack .../283-libwayland-cursor0_1.16.0-1ubuntu1.1~18.04.3_amd64.deb ...\n", - "Unpacking libwayland-cursor0:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Selecting previously unselected package libwayland-egl1:amd64.\n", - "Preparing to unpack .../284-libwayland-egl1_1.16.0-1ubuntu1.1~18.04.3_amd64.deb ...\n", - "Unpacking libwayland-egl1:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Selecting previously unselected 
package libwayland-egl1-mesa:amd64.\n", - "Preparing to unpack .../285-libwayland-egl1-mesa_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking libwayland-egl1-mesa:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected package libxkbcommon0:amd64.\n", - "Preparing to unpack .../286-libxkbcommon0_0.8.2-1~ubuntu18.04.1_amd64.deb ...\n", - "Unpacking libxkbcommon0:amd64 (0.8.2-1~ubuntu18.04.1) ...\n", - "Selecting previously unselected package libsdl2-2.0-0:amd64.\n", - "Preparing to unpack .../287-libsdl2-2.0-0_2.0.8+dfsg1-1ubuntu1.18.04.4_amd64.deb ...\n", - "Unpacking libsdl2-2.0-0:amd64 (2.0.8+dfsg1-1ubuntu1.18.04.4) ...\n", - "Selecting previously unselected package libmikmod3:amd64.\n", - "Preparing to unpack .../288-libmikmod3_3.3.11.1-3_amd64.deb ...\n", - "Unpacking libmikmod3:amd64 (3.3.11.1-3) ...\n", - "Selecting previously unselected package libmikmod-dev:amd64.\n", - "Preparing to unpack .../289-libmikmod-dev_3.3.11.1-3_amd64.deb ...\n", - "Unpacking libmikmod-dev:amd64 (3.3.11.1-3) ...\n", - "Selecting previously unselected package libmodplug1:amd64.\n", - "Preparing to unpack .../290-libmodplug1_1%3a0.8.9.0-1_amd64.deb ...\n", - "Unpacking libmodplug1:amd64 (1:0.8.9.0-1) ...\n", - "Selecting previously unselected package libmodplug-dev:amd64.\n", - "Preparing to unpack .../291-libmodplug-dev_1%3a0.8.9.0-1_amd64.deb ...\n", - "Unpacking libmodplug-dev:amd64 (1:0.8.9.0-1) ...\n", - "Selecting previously unselected package libproxy1v5:amd64.\n", - "Preparing to unpack .../292-libproxy1v5_0.4.15-1_amd64.deb ...\n", - "Unpacking libproxy1v5:amd64 (0.4.15-1) ...\n", - "Selecting previously unselected package libproxy-tools.\n", - "Preparing to unpack .../293-libproxy-tools_0.4.15-1_amd64.deb ...\n", - "Unpacking libproxy-tools (0.4.15-1) ...\n", - "Selecting previously unselected package libpulse-mainloop-glib0:amd64.\n", - "Preparing to unpack .../294-libpulse-mainloop-glib0_1%3a11.1-1ubuntu7.4_amd64.deb ...\n", - "Unpacking 
libpulse-mainloop-glib0:amd64 (1:11.1-1ubuntu7.4) ...\n", - "Selecting previously unselected package libpulse-dev:amd64.\n", - "Preparing to unpack .../295-libpulse-dev_1%3a11.1-1ubuntu7.4_amd64.deb ...\n", - "Unpacking libpulse-dev:amd64 (1:11.1-1ubuntu7.4) ...\n", - "Selecting previously unselected package libsdl1.2debian:amd64.\n", - "Preparing to unpack .../296-libsdl1.2debian_1.2.15+dfsg2-0.1ubuntu0.1_amd64.deb ...\n", - "Unpacking libsdl1.2debian:amd64 (1.2.15+dfsg2-0.1ubuntu0.1) ...\n", - "Selecting previously unselected package libvorbisfile3:amd64.\n", - "Preparing to unpack .../297-libvorbisfile3_1.3.5-4.2_amd64.deb ...\n", - "Unpacking libvorbisfile3:amd64 (1.3.5-4.2) ...\n", - "Selecting previously unselected package libsdl-mixer1.2:amd64.\n", - "Preparing to unpack .../298-libsdl-mixer1.2_1.2.12-14_amd64.deb ...\n", - "Unpacking libsdl-mixer1.2:amd64 (1.2.12-14) ...\n", - "Selecting previously unselected package libsdl1.2-dev.\n", - "Preparing to unpack .../299-libsdl1.2-dev_1.2.15+dfsg2-0.1ubuntu0.1_amd64.deb ...\n", - "Unpacking libsdl1.2-dev (1.2.15+dfsg2-0.1ubuntu0.1) ...\n", - "Selecting previously unselected package libvorbis-dev:amd64.\n", - "Preparing to unpack .../300-libvorbis-dev_1.3.5-4.2_amd64.deb ...\n", - "Unpacking libvorbis-dev:amd64 (1.3.5-4.2) ...\n", - "Selecting previously unselected package libsdl-mixer1.2-dev:amd64.\n", - "Preparing to unpack .../301-libsdl-mixer1.2-dev_1.2.12-14_amd64.deb ...\n", - "Unpacking libsdl-mixer1.2-dev:amd64 (1.2.12-14) ...\n", - "Selecting previously unselected package libserf-1-1:amd64.\n", - "Preparing to unpack .../302-libserf-1-1_1.3.9-6_amd64.deb ...\n", - "Unpacking libserf-1-1:amd64 (1.3.9-6) ...\n", - "Selecting previously unselected package libsvn1:amd64.\n", - "Preparing to unpack .../303-libsvn1_1.9.7-4ubuntu1_amd64.deb ...\n", - "Unpacking libsvn1:amd64 (1.9.7-4ubuntu1) ...\n", - "Selecting previously unselected package libvlccore9:amd64.\n", - "Preparing to unpack 
.../304-libvlccore9_3.0.8-0ubuntu18.04.1_amd64.deb ...\n", - "Unpacking libvlccore9:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Selecting previously unselected package libvlc5:amd64.\n", - "Preparing to unpack .../305-libvlc5_3.0.8-0ubuntu18.04.1_amd64.deb ...\n", - "Unpacking libvlc5:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Selecting previously unselected package libvlc-bin:amd64.\n", - "Preparing to unpack .../306-libvlc-bin_3.0.8-0ubuntu18.04.1_amd64.deb ...\n", - "Unpacking libvlc-bin:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Selecting previously unselected package libvlc-dev:amd64.\n", - "Preparing to unpack .../307-libvlc-dev_3.0.8-0ubuntu18.04.1_amd64.deb ...\n", - "Unpacking libvlc-dev:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Selecting previously unselected package libx11-doc.\n", - "Preparing to unpack .../308-libx11-doc_2%3a1.6.4-3ubuntu0.2_all.deb ...\n", - "Unpacking libx11-doc (2:1.6.4-3ubuntu0.2) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Selecting previously unselected package x11proto-xf86dga-dev.\n", - "Preparing to unpack .../309-x11proto-xf86dga-dev_2018.4-4_all.deb ...\n", - "Unpacking x11proto-xf86dga-dev (2018.4-4) ...\n", - "Selecting previously unselected package libxxf86dga-dev:amd64.\n", - "Preparing to unpack .../310-libxxf86dga-dev_2%3a1.1.4-1_amd64.deb ...\n", - "Unpacking libxxf86dga-dev:amd64 (2:1.1.4-1) ...\n", - "Selecting previously unselected package subversion.\n", - "Preparing to unpack .../311-subversion_1.9.7-4ubuntu1_amd64.deb ...\n", - "Unpacking subversion (1.9.7-4ubuntu1) ...\n", - "Selecting previously unselected package timgm6mb-soundfont.\n", - "Preparing to unpack .../312-timgm6mb-soundfont_1.3-2_all.deb ...\n", - "Unpacking timgm6mb-soundfont (1.3-2) ...\n", - "Selecting previously unselected package zip.\n", - "Preparing to unpack .../313-zip_3.0-11build1_amd64.deb ...\n", - "Unpacking zip (3.0-11build1) ...\n", - "Selecting previously unselected package libc6-dbg:amd64.\n", - 
"Preparing to unpack .../314-libc6-dbg_2.27-3ubuntu1_amd64.deb ...\n", - "Unpacking libc6-dbg:amd64 (2.27-3ubuntu1) ...\n", - "Selecting previously unselected package libdca0:amd64.\n", - "Preparing to unpack .../315-libdca0_0.0.5-10_amd64.deb ...\n", - "Unpacking libdca0:amd64 (0.0.5-10) ...\n", - "Selecting previously unselected package libdca-dev:amd64.\n", - "Preparing to unpack .../316-libdca-dev_0.0.5-10_amd64.deb ...\n", - "Unpacking libdca-dev:amd64 (0.0.5-10) ...\n", - "Selecting previously unselected package libdts-dev:amd64.\n", - "Preparing to unpack .../317-libdts-dev_0.0.5-10_amd64.deb ...\n", - "Unpacking libdts-dev:amd64 (0.0.5-10) ...\n", - "Setting up libapr1:amd64 (1.6.3-2) ...\n", - "Setting up libglvnd0:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up libopengl0:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up libpathplan4 (2.40.1-2) ...\n", - "Setting up libgles1:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up libicu60:amd64 (60.2-3ubuntu3.1) ...\n", - "Setting up liblab-gamut1 (2.40.1-2) ...\n", - "Setting up libgtk2.0-common (2.24.32-1ubuntu1) ...\n", - "Setting up libxdot4 (2.40.1-2) ...\n", - "Setting up libc6-dbg:amd64 (2.27-3ubuntu1) ...\n", - "Setting up libasyncns0:amd64 (0.8-6) ...\n", - "Setting up libarchive-zip-perl (1.60-1ubuntu0.1) ...\n", - "Setting up libmodplug1:amd64 (1:0.8.9.0-1) ...\n", - "Setting up libtimedate-perl (2.3000-2) ...\n", - "Setting up libjbig0:amd64 (2.1-3.1build1) ...\n", - "Setting up libsigsegv2:amd64 (2.12-1) ...\n", - "Setting up libpthread-stubs0-dev:amd64 (0.3-4) ...\n", - "Setting up fonts-dejavu-core (2.37-1) ...\n", - "Setting up gir1.2-freedesktop:amd64 (1.56.1-1) ...\n", - "Setting up libelf1:amd64 (0.170-0.4ubuntu0.1) ...\n", - "Setting up groff-base (1.22.3-10) ...\n", - "Setting up libglib2.0-0:amd64 (2.56.4-0ubuntu0.18.04.6) ...\n", - "No schema files found: doing nothing.\n", - "Setting up libasound2-data (1.1.3-5ubuntu0.4) ...\n", - "Setting up libxshmfence1:amd64 (1.3-1) ...\n", - "Setting up 
xorg-sgml-doctools (1:1.11-1) ...\n", - "Setting up libwayland-client0:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Setting up xkb-data (2.23.1-1ubuntu1.18.04.1) ...\n", - "Setting up libproxy1v5:amd64 (0.4.15-1) ...\n", - "Setting up libarchive-cpio-perl (0.10-1) ...\n", - "Setting up libgdk-pixbuf2.0-common (2.36.11-2) ...\n", - "Setting up libdatrie1:amd64 (0.2.10-7) ...\n", - "Setting up libtiff5:amd64 (4.0.9-5ubuntu0.3) ...\n", - "Setting up gettext-base (0.19.8.1-6ubuntu0.3) ...\n", - "Setting up libpipeline1:amd64 (1.5.0-1) ...\n", - "Setting up libglapi-mesa:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up m4 (1.4.18-1) ...\n", - "Setting up fp-docs-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libxml2:amd64 (2.9.4+dfsg1-6.1ubuntu1.3) ...\n", - "Setting up zip (3.0-11build1) ...\n", - "Setting up x11proto-dev (2018.4-4) ...\n", - "Setting up libmagic-mgc (1:5.32-2ubuntu0.3) ...\n", - "Setting up libasound2:amd64 (1.1.3-5ubuntu0.4) ...\n", - "Setting up libopenal-data (1:1.18.2-2) ...\n", - "Setting up libmagic1:amd64 (1:5.32-2ubuntu0.3) ...\n", - "Setting up libdrm-common (2.4.99-1ubuntu1~18.04.2) ...\n", - "Setting up libgraphite2-3:amd64 (1.3.11-2) ...\n", - "Setting up libjbig-dev:amd64 (2.1-3.1build1) ...\n", - "Setting up libcroco3:amd64 (0.6.12-2) ...\n", - "Setting up libogg0:amd64 (1.3.2-1) ...\n", - "Setting up libsys-hostname-long-perl (1.5-1) ...\n", - "Setting up libatk1.0-data (2.28.1-1) ...\n", - "Setting up liba52-0.7.4:amd64 (0.7.4-19) ...\n", - "Setting up x11proto-damage-dev (1:2018.4-4) ...\n", - "Setting up libx11-xcb1:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - "Setting up libpixman-1-0:amd64 (0.34.0-2) ...\n", - "Setting up xtrans-dev (1.3.5-1) ...\n", - "Setting up x11proto-xext-dev (2018.4-4) ...\n", - "Setting up libglib2.0-data (2.56.4-0ubuntu0.18.04.6) ...\n", - "Setting up fp-units-rtl-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libmail-sendmail-perl (0.80-1) ...\n", - "Setting up lazarus-src-1.8 (1.8.2+dfsg-3) 
...\n", - "Setting up x11proto-xinerama-dev (2018.4-4) ...\n", - "Setting up fpc-source-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up autotools-dev (20180224.1) ...\n", - "Setting up libpixman-1-dev:amd64 (0.34.0-2) ...\n", - "Setting up libatk1.0-0:amd64 (2.28.1-1) ...\n", - "Setting up libaprutil1:amd64 (1.6.1-2) ...\n", - "Setting up x11proto-randr-dev (2018.4-4) ...\n", - "Setting up libltdl7:amd64 (2.4.6-2) ...\n", - "Setting up libtiffxx5:amd64 (4.0.9-5ubuntu0.3) ...\n", - "Setting up liba52-0.7.4-dev (0.7.4-19) ...\n", - "Setting up libx11-doc (2:1.6.4-3ubuntu0.2) ...\n", - "Setting up libpciaccess0:amd64 (0.14-1) ...\n", - "Setting up x11proto-xf86dga-dev (2018.4-4) ...\n", - "Setting up libmikmod-config (3.3.11.1-3) ...\n", - "Setting up libsensors4:amd64 (1:3.4.0-4) ...\n", - "Setting up libgles2:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up shared-mime-info (1.9-2) ...\n", - "Setting up libxkbcommon0:amd64 (0.8.2-1~ubuntu18.04.1) ...\n", - "Setting up libpcrecpp0v5:amd64 (2:8.39-9) ...\n", - "Setting up libpcre32-3:amd64 (2:8.39-9) ...\n", - "Setting up libvpx5:amd64 (1.7.0-3ubuntu0.18.04.1) ...\n", - "Setting up gdbserver (8.1-0ubuntu3.2) ...\n", - "Setting up icu-devtools (60.2-3ubuntu3.1) ...\n", - "Setting up libpcre16-3:amd64 (2:8.39-9) ...\n", - "Setting up libthai-data (0.1.27-2) ...\n", - "Setting up liblzma-dev:amd64 (5.2.2-1.3) ...\n", - "Setting up libxdmcp6:amd64 (1:1.1.2-3) ...\n", - "Setting up timgm6mb-soundfont (1.3-2) ...\n", - "Setting up libmad0:amd64 (0.15.1b-9ubuntu18.04.1) ...\n", - "Setting up libllvm9:amd64 (1:9-2~ubuntu18.04.2) ...\n", - "Setting up bsdmainutils (11.1.2ubuntu1) ...\n", - "update-alternatives: using /usr/bin/bsd-write to provide /usr/bin/write (write) in auto mode\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/write.1.gz because associated file /usr/share/man/man1/bsd-write.1.gz (of link group write) doesn't exist\n", - "update-alternatives: using /usr/bin/bsd-from to provide 
/usr/bin/from (from) in auto mode\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/from.1.gz because associated file /usr/share/man/man1/bsd-from.1.gz (of link group from) doesn't exist\n", - "Setting up libgmpxx4ldbl:amd64 (2:6.1.2+dfsg-2) ...\n", - "Setting up libdca0:amd64 (0.0.5-10) ...\n", - "Setting up libsamplerate0:amd64 (0.1.9-1) ...\n", - "Setting up libsndio6.1:amd64 (1.1.0-3) ...\n", - "Setting up libvorbis0a:amd64 (1.3.5-4.2) ...\n", - "Setting up x11-common (1:7.7+19ubuntu7.1) ...\n", - "debconf: unable to initialize frontend: Dialog\n", - "debconf: (No usable dialog-like program is installed, so the dialog based frontend cannot be used. at /usr/share/perl5/Debconf/FrontEnd/Dialog.pm line 76.)\n", - "debconf: falling back to frontend: Readline\n", - "update-rc.d: warning: start and stop actions are no longer supported; falling back to defaults\n", - "invoke-rc.d: could not determine current runlevel\n", - "invoke-rc.d: policy-rc.d denied execution of start.\n", - "Setting up libmodplug-dev:amd64 (1:0.8.9.0-1) ...\n", - "Setting up fp-utils-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "update-alternatives: using /usr/lib/x86_64-linux-gnu/fpc/3.0.4 to provide /usr/lib/x86_64-linux-gnu/fpc/default (fp-utils) in auto mode\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/chmcmd.1.gz because associated file /usr/share/man/man1/chmcmd-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/chmls.1.gz because associated file /usr/share/man/man1/chmls-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/ifpc.1.gz because associated file /usr/share/man/man1/ifpc-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fppkg.1.gz because associated file /usr/share/man/man1/fppkg-3.0.4.1.gz (of link group fp-utils) 
doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/instantfpc.1.gz because associated file /usr/share/man/man1/ifpc-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/bin2obj.1.gz because associated file /usr/share/man/man1/bin2obj-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/data2inc.1.gz because associated file /usr/share/man/man1/data2inc-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fprcp.1.gz because associated file /usr/share/man/man1/fprcp-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/h2paspp.1.gz because associated file /usr/share/man/man1/h2paspp-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/makeskel.1.gz because associated file /usr/share/man/man1/makeskel-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/postw32.1.gz because associated file /usr/share/man/man1/postw32-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/relpath.1.gz because associated file /usr/share/man/man1/relpath-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/rmcvsdir.1.gz because associated file /usr/share/man/man1/rmcvsdir-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/unitdiff.1.gz because associated file /usr/share/man/man1/unitdiff-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/delp.1.gz because 
associated file /usr/share/man/man1/delp-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fpcmake.1.gz because associated file /usr/share/man/man1/fpcmake-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fpcsubst.1.gz because associated file /usr/share/man/man1/fpcsubst-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/h2pas.1.gz because associated file /usr/share/man/man1/h2pas-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/plex.1.gz because associated file /usr/share/man/man1/plex-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/ppdep.1.gz because associated file /usr/share/man/man1/ppdep-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/ppudump.1.gz because associated file /usr/share/man/man1/ppudump-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/ppufiles.1.gz because associated file /usr/share/man/man1/ppufiles-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/ppumove.1.gz because associated file /usr/share/man/man1/ppumove-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/ptop.1.gz because associated file /usr/share/man/man1/ptop-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/pyacc.1.gz because associated file /usr/share/man/man1/pyacc-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: 
skip creation of /usr/share/man/man1/rstconv.1.gz because associated file /usr/share/man/man1/rstconv-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fpdoc.1.gz because associated file /usr/share/man/man1/fpdoc-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fpclasschart.1.gz because associated file /usr/share/man/man1/fpclasschart-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man5/fpcmake.5.gz because associated file /usr/share/man/man5/fpcmake-3.0.4.5.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man5/ptop.cfg.5.gz because associated file /usr/share/man/man5/ptop-3.0.4.cfg.5.gz (of link group fp-utils) doesn't exist\n", - "Setting up hicolor-icon-theme (0.17-2) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Setting up libglib2.0-bin (2.56.4-0ubuntu0.18.04.6) ...\n", - "Setting up libogg-dev:amd64 (1.3.2-1) ...\n", - "Setting up libslang2:amd64 (2.3.1a-3ubuntu1) ...\n", - "Setting up libslang2-dev:amd64 (2.3.1a-3ubuntu1) ...\n", - "Setting up libglvnd-core-dev:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up libgraphite2-dev:amd64 (1.3.11-2) ...\n", - "Setting up libwayland-cursor0:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Setting up x11proto-input-dev (2018.4-4) ...\n", - "Setting up x11proto-composite-dev (1:2018.4-4) ...\n", - "Setting up libcdt5 (2.40.1-2) ...\n", - "Setting up libwayland-egl1:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Setting up libx11-data (2:1.6.4-3ubuntu0.2) ...\n", - "Setting up libpython2.7-stdlib:amd64 (2.7.17-1~18.04) ...\n", - "Setting up lazarus-doc-1.8 (1.8.2+dfsg-3) ...\n", - "Setting up fp-ide-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "debconf: unable to initialize frontend: Dialog\n", - "debconf: (No usable 
dialog-like program is installed, so the dialog based frontend cannot be used. at /usr/share/perl5/Debconf/FrontEnd/Dialog.pm line 76.)\n", - "debconf: falling back to frontend: Readline\n", - "update-alternatives: using /usr/bin/fp-3.0.4 to provide /usr/bin/fp (fp) in auto mode\n", - "Setting up libxau6:amd64 (1:1.0.8-1) ...\n", - "Setting up autopoint (0.19.8.1-6ubuntu0.3) ...\n", - "Setting up libidn11:amd64 (1.33-2.1ubuntu1.2) ...\n", - "Setting up liblzo2-2:amd64 (2.08-1.2) ...\n", - "Setting up libavahi-common-data:amd64 (0.7-3.1ubuntu1.2) ...\n", - "Setting up libcgraph6 (2.40.1-2) ...\n", - "Setting up fpc-source (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libwayland-server0:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Setting up libwebp6:amd64 (0.6.1-2) ...\n", - "Setting up libfile-stripnondeterminism-perl (0.040-1.1~build1) ...\n", - "Setting up fp-units-base-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libvorbisfile3:amd64 (1.3.5-4.2) ...\n", - "Setting up libgmp-dev:amd64 (2:6.1.2+dfsg-2) ...\n", - "Setting up fp-units-multimedia-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up fp-units-math-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libgts-0.7-5:amd64 (0.7.6+darcs121130-4) ...\n", - "Setting up libmad0-dev (0.15.1b-9ubuntu18.04.1) ...\n", - "Setting up libasound2-dev:amd64 (1.1.3-5ubuntu0.4) ...\n", - "Setting up libdca-dev:amd64 (0.0.5-10) ...\n", - "Setting up libpcre3-dev:amd64 (2:8.39-9) ...\n", - "Setting up gir1.2-atk-1.0:amd64 (2.28.1-1) ...\n", - "Setting up fontconfig-config (2.12.6-0ubuntu2) ...\n", - "Setting up x11proto-core-dev (2018.4-4) ...\n", - "Setting up libltdl-dev:amd64 (2.4.6-2) ...\n", - "Setting up libxshmfence-dev:amd64 (1.3-1) ...\n", - "Setting up libglib2.0-dev-bin (2.56.4-0ubuntu0.18.04.6) ...\n", - "Setting up libdw1:amd64 (0.170-0.4ubuntu0.1) ...\n", - "Setting up gettext (0.19.8.1-6ubuntu0.3) ...\n", - "Setting up fp-units-fv-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up 
fp-compiler-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "debconf: unable to initialize frontend: Dialog\n", - "debconf: (No usable dialog-like program is installed, so the dialog based frontend cannot be used. at /usr/share/perl5/Debconf/FrontEnd/Dialog.pm line 76.)\n", - "debconf: falling back to frontend: Readline\n", - "update-alternatives: using /usr/bin/x86_64-linux-gnu-fpc-3.0.4 to provide /usr/bin/fpc (fpc) in auto mode\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/ppcx64.1.gz because associated file /usr/share/man/man1/ppcx64-3.0.4.1.gz (of link group fpc) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fpc.1.gz because associated file /usr/share/man/man1/fpc-3.0.4.1.gz (of link group fpc) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fpc-depends.1.gz because associated file /usr/share/man/man1/fpc-depends-3.0.4.1.gz (of link group fpc) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fpcres.1.gz because associated file /usr/share/man/man1/fpcres-3.0.4.1.gz (of link group fpc) doesn't exist\n", - "update-alternatives: using /etc/fpc-3.0.4.cfg to provide /etc/fpc.cfg (fpc.cfg) in auto mode\n", - "update-alternatives: using /usr/bin/fpc to provide /usr/bin/pc (pc) in auto mode\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/pc.1.gz because associated file /usr/share/man/man1/fpc.1.gz (of link group pc) doesn't exist\n", - "Setting up libgvpr2 (2.40.1-2) ...\n", - "Setting up libproxy-tools (0.4.15-1) ...\n", - "Setting up x11proto-fixes-dev (1:2018.4-4) ...\n", - "Setting up libtiff5-dev:amd64 (4.0.9-5ubuntu0.3) ...\n", - "Setting up libflac8:amd64 (1.3.2-1) ...\n", - "Setting up libxml2-utils (2.9.4+dfsg1-6.1ubuntu1.3) ...\n", - "Setting up fp-units-gfx-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up python2.7 (2.7.17-1~18.04) ...\n", - "Setting up fp-units-fcl-3.0.4:amd64 
(3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libharfbuzz0b:amd64 (1.7.2-1ubuntu1) ...\n", - "Setting up x11proto-xf86vidmode-dev (2018.4-4) ...\n", - "Setting up libxau-dev:amd64 (1:1.0.8-1) ...\n", - "Setting up autoconf (2.69-11) ...\n", - "Setting up fp-units-misc-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libthai0:amd64 (0.1.27-2) ...\n", - "Setting up fp-units-net-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up file (1:5.32-2ubuntu0.3) ...\n", - "Setting up fp-units-db-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libglib2.0-dev:amd64 (2.56.4-0ubuntu0.18.04.6) ...\n", - "Setting up libdrm2:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Setting up libpython-stdlib:amd64 (2.7.15~rc1-1) ...\n", - "Setting up intltool-debian (0.35.0+20060710.4) ...\n", - "Setting up libdrm-intel1:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Setting up libvpx-dev:amd64 (1.7.0-3ubuntu0.18.04.1) ...\n", - "Setting up libxdmcp-dev:amd64 (1:1.1.2-3) ...\n", - "Setting up gir1.2-harfbuzz-0.0:amd64 (1.7.2-1ubuntu1) ...\n", - "Setting up libserf-1-1:amd64 (1.3.9-6) ...\n", - "Setting up libvlccore9:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Setting up lcl-utils-1.8 (1.8.2+dfsg-3) ...\n", - "debconf: unable to initialize frontend: Dialog\n", - "debconf: (No usable dialog-like program is installed, so the dialog based frontend cannot be used. 
at /usr/share/perl5/Debconf/FrontEnd/Dialog.pm line 76.)\n", - "debconf: falling back to frontend: Readline\n", - "update-alternatives: using /usr/lib/lazarus/1.8.2 to provide /usr/lib/lazarus/default (lazarus) in auto mode\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/lazbuild.1.gz because associated file /usr/share/man/man1/lazbuild-1.8.2.1.gz (of link group lazarus) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/lazre.1.gzs because associated file /usr/share/man/man1/lazres-1.8.2.1.gz (of link group lazarus) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/lrstolfm.1.gz because associated file /usr/share/man/man1/lrstolfm-1.8.2.1.gz (of link group lazarus) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/svn2revisioninc.1.gz because associated file /usr/share/man/man1/svn2revisioninc-1.8.2.1.gz (of link group lazarus) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/updatepofiles.1.gz because associated file /usr/share/man/man1/updatepofiles-1.8.2.1.gz (of link group lazarus) doesn't exist\n", - "Setting up automake (1:1.15.1-3ubuntu2) ...\n", - "update-alternatives: using /usr/bin/automake-1.15 to provide /usr/bin/automake (automake) in auto mode\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/automake.1.gz because associated file /usr/share/man/man1/automake-1.15.1.gz (of link group automake) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/aclocal.1.gz because associated file /usr/share/man/man1/aclocal-1.15.1.gz (of link group automake) doesn't exist\n", - "Setting up libjack-jackd2-0:amd64 (1.9.12~dfsg-2) ...\n", - "Setting up libice6:amd64 (2:1.0.9-2) ...\n", - "Setting up man-db (2.8.3-2ubuntu0.1) ...\n", - "debconf: unable to initialize frontend: Dialog\n", - "debconf: (No usable dialog-like program is 
installed, so the dialog based frontend cannot be used. at /usr/share/perl5/Debconf/FrontEnd/Dialog.pm line 76.)\n", - "debconf: falling back to frontend: Readline\n", - "Building database of manual pages ...\n", - "Setting up libopenal1:amd64 (1:1.18.2-2) ...\n", - "Setting up libdts-dev:amd64 (0.0.5-10) ...\n", - "Setting up libsvn1:amd64 (1.9.7-4ubuntu1) ...\n", - "Setting up libavahi-common3:amd64 (0.7-3.1ubuntu1.2) ...\n", - "Setting up libdrm-radeon1:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Setting up libgts-bin (0.7.6+darcs121130-4) ...\n", - "Setting up libvorbisenc2:amd64 (1.3.5-4.2) ...\n", - "Setting up libdrm-nouveau2:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Setting up libcaca0:amd64 (0.99.beta19-2ubuntu0.18.04.1) ...\n", - "Setting up libxcb1:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libharfbuzz-gobject0:amd64 (1.7.2-1ubuntu1) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Setting up python (2.7.15~rc1-1) ...\n", - "Setting up libbabeltrace1:amd64 (1.5.5-1) ...\n", - "Setting up libvorbis-dev:amd64 (1.3.5-4.2) ...\n", - "Setting up libtool (2.4.6-2) ...\n", - "Setting up libxcb-present0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libtiff-dev (4.0.9-5ubuntu0.3) ...\n", - "Setting up libfontconfig1:amd64 (2.12.6-0ubuntu2) ...\n", - "Setting up libxcb-dri2-0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libsm6:amd64 (2:1.2.2-1) ...\n", - "Setting up libxcb-dri3-0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxcb-glx0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxcb-randr0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxcb-xfixes0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxcb-render0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libvlc5:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Setting up libharfbuzz-icu0:amd64 (1.7.2-1ubuntu1) ...\n", - "Setting up po-debconf (1.0.20) ...\n", - "Setting up libdrm-amdgpu1:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Setting up lcl-nogui-1.8 
(1.8.2+dfsg-3) ...\n", - "Setting up libdrm-dev:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Setting up libicu-le-hb0:amd64 (1.0.3+git161113-4) ...\n", - "Setting up libgbm1:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up libx11-6:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - "Setting up libflac-dev:amd64 (1.3.2-1) ...\n", - "Setting up libgl1-mesa-dri:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up libatk1.0-dev:amd64 (2.28.1-1) ...\n", - "Setting up libxcb-sync1:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libsndfile1:amd64 (1.0.28-4ubuntu0.18.04.1) ...\n", - "Setting up libcaca-dev (0.99.beta19-2ubuntu0.18.04.1) ...\n", - "Setting up libvlc-bin:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Setting up libice-dev:amd64 (2:1.0.9-2) ...\n", - "Setting up libiculx60:amd64 (60.2-3ubuntu3.1) ...\n", - "Setting up subversion (1.9.7-4ubuntu1) ...\n", - "Setting up libxcomposite1:amd64 (1:0.4.4-2) ...\n", - "Setting up libxcb-shm0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libvlc-dev:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Setting up libxpm4:amd64 (1:3.5.12-1) ...\n", - "Setting up libxt6:amd64 (1:1.1.5-1) ...\n", - "Setting up libxcb-shape0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxrender1:amd64 (1:0.9.10-1) ...\n", - "Setting up libxcb1-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxcb-glx0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libavahi-client3:amd64 (0.7-3.1ubuntu1.2) ...\n", - "Setting up libegl-mesa0:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up libx11-dev:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - "Setting up libxft2:amd64 (2.3.2-1) ...\n", - "Setting up gdb (8.1-0ubuntu3.2) ...\n", - "Setting up libxcb-sync-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up fontconfig (2.12.6-0ubuntu2) ...\n", - "Regenerating fonts cache... 
done.\n", - "Setting up libcups2:amd64 (2.2.7-1ubuntu2.7) ...\n", - "Setting up libfontconfig1-dev:amd64 (2.12.6-0ubuntu2) ...\n", - "Setting up libx11-xcb-dev:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - "Setting up libforms2 (1.2.3-1.3) ...\n", - "Setting up libsm-dev:amd64 (2:1.2.2-1) ...\n", - "Setting up libxdamage1:amd64 (1:1.1.4-3) ...\n", - "Setting up mesa-common-dev:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up libxext6:amd64 (2:1.3.3-1) ...\n", - "Setting up libxfixes3:amd64 (1:5.0.3-1) ...\n", - "Setting up libxpm-dev:amd64 (1:3.5.12-1) ...\n", - "Setting up libxss1:amd64 (1:1.2.2-1) ...\n", - "Setting up libgdk-pixbuf2.0-0:amd64 (2.36.11-2) ...\n", - "Setting up libxcb-shm0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libgdk-pixbuf2.0-bin (2.36.11-2) ...\n", - "Setting up libgd3:amd64 (2.2.5-4ubuntu0.4) ...\n", - "Setting up gir1.2-gdkpixbuf-2.0:amd64 (2.36.11-2) ...\n", - "Setting up libxrender-dev:amd64 (1:0.9.10-1) ...\n", - "Setting up libpulse0:amd64 (1:11.1-1ubuntu7.4) ...\n", - "Setting up libxcb-dri2-0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxcb-render0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libegl1:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up libxft-dev (2.3.2-1) ...\n", - "Setting up gtk-update-icon-cache (3.22.30-1ubuntu4) ...\n", - "Setting up libgdk-pixbuf2.0-dev (2.36.11-2) ...\n", - "Setting up libxcb-dri3-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxcursor1:amd64 (1:1.1.15-1) ...\n", - "Setting up libxxf86dga1:amd64 (2:1.1.4-1) ...\n", - "Setting up libxext-dev:amd64 (2:1.3.3-1) ...\n", - "Setting up libpango-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Setting up libwayland-egl1-mesa:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up libxcb-shape0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxxf86dga-dev:amd64 (2:1.1.4-1) ...\n", - "Setting up libxxf86vm1:amd64 (1:1.1.4-1) ...\n", - "Setting up libxxf86vm-dev:amd64 (1:1.1.4-1) ...\n", - "Setting up libxfixes-dev:amd64 
(1:5.0.3-1) ...\n", - "Setting up libfluidsynth1:amd64 (1.1.9-1) ...\n", - "Setting up libxrandr2:amd64 (2:1.5.1-1) ...\n", - "Setting up libglx-mesa0:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up libxi6:amd64 (2:1.7.9-1) ...\n", - "Setting up libcairo2:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Setting up libxinerama1:amd64 (2:1.1.3-1) ...\n", - "Setting up libxcursor-dev:amd64 (1:1.1.15-1) ...\n", - "Setting up libforms-dev (1.2.3-1.3) ...\n", - "Setting up libxcb-randr0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxt-dev:amd64 (1:1.1.5-1) ...\n", - "Setting up libpulse-mainloop-glib0:amd64 (1:11.1-1ubuntu7.4) ...\n", - "Setting up libpulse-dev:amd64 (1:11.1-1ubuntu7.4) ...\n", - "Setting up libxrandr-dev:amd64 (2:1.5.1-1) ...\n", - "Setting up libxcomposite-dev:amd64 (1:0.4.4-2) ...\n", - "Setting up libcairo-script-interpreter2:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Setting up libxcb-xfixes0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libcairo-gobject2:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Setting up libxdamage-dev:amd64 (1:1.1.4-3) ...\n", - "Setting up libsdl1.2debian:amd64 (1.2.15+dfsg2-0.1ubuntu0.1) ...\n", - "Setting up libpangoft2-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Setting up libgd-dev:amd64 (2.2.5-4ubuntu0.4) ...\n", - "Setting up libsdl2-2.0-0:amd64 (2.0.8+dfsg1-1ubuntu1.18.04.4) ...\n", - "Setting up libxinerama-dev:amd64 (2:1.1.3-1) ...\n", - "Setting up libxcb-present-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxi-dev:amd64 (2:1.7.9-1) ...\n", - "Setting up libglx0:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up libcairo2-dev:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Setting up libpangoxft-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Setting up libpangocairo-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Setting up gir1.2-pango-1.0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Setting up libmikmod3:amd64 (3.3.11.1-3) ...\n", - "Setting up libgl1:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up libglu1-mesa:amd64 
(9.0.0-2.1build1) ...\n", - "Setting up libgvc6 (2.40.1-2) ...\n", - "Setting up librsvg2-2:amd64 (2.40.20-2) ...\n", - "Setting up libsdl-mixer1.2:amd64 (1.2.12-14) ...\n", - "Setting up libmikmod-dev:amd64 (3.3.11.1-3) ...\n", - "Setting up libglvnd-dev:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up librsvg2-common:amd64 (2.40.20-2) ...\n", - "Setting up libgl1-mesa-dev:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up libglu1-mesa-dev:amd64 (9.0.0-2.1build1) ...\n", - "Setting up libsdl1.2-dev (1.2.15+dfsg2-0.1ubuntu0.1) ...\n", - "Setting up libsdl-mixer1.2-dev:amd64 (1.2.12-14) ...\n", - "Setting up dh-autoreconf (17) ...\n", - "Setting up libicu-le-hb-dev:amd64 (1.0.3+git161113-4) ...\n", - "Setting up libicu-dev (60.2-3ubuntu3.1) ...\n", - "Setting up libharfbuzz-dev:amd64 (1.7.2-1ubuntu1) ...\n", - "Setting up adwaita-icon-theme (3.28.0-1ubuntu1) ...\n", - "update-alternatives: using /usr/share/icons/Adwaita/cursor.theme to provide /usr/share/icons/default/index.theme (x-cursor-theme) in auto mode\n", - "Setting up debhelper (11.1.6ubuntu2) ...\n", - "Setting up libgtk2.0-0:amd64 (2.24.32-1ubuntu1) ...\n", - "Setting up libgail18:amd64 (2.24.32-1ubuntu1) ...\n", - "Setting up lazarus-ide-1.8 (1.8.2+dfsg-3) ...\n", - "update-alternatives: using /usr/lib/lazarus/1.8.2/startlazarus to provide /usr/bin/lazarus-ide (lazarus-ide) in auto mode\n", - "Setting up libgail-common:amd64 (2.24.32-1ubuntu1) ...\n", - "Setting up libgvc6-plugins-gtk (2.40.1-2) ...\n", - "Setting up humanity-icon-theme (0.6.15) ...\n", - "Setting up libgraphviz-dev (2.40.1-2) ...\n", - "Setting up dh-strip-nondeterminism (0.040-1.1~build1) ...\n", - "Setting up lazarus-ide-gtk2-1.8 (1.8.2+dfsg-3) ...\n", - "update-alternatives: using /usr/lib/lazarus/1.8.2/lazarus-gtk2 to provide /usr/lib/lazarus/1.8.2/lazarus (lazarus-1.8.2) in auto mode\n", - "Setting up libpango1.0-dev (1.40.14-1ubuntu0.1) ...\n", - "Setting up lazarus-ide (1.8.2+dfsg-3) ...\n", - "Setting up gir1.2-gtk-2.0 
(2.24.32-1ubuntu1) ...\n", - "Setting up libgtk2.0-bin (2.24.32-1ubuntu1) ...\n", - "Setting up ubuntu-mono (16.10+18.04.20181005-0ubuntu1) ...\n", - "Setting up libgtk2.0-dev (2.24.32-1ubuntu1) ...\n", - "Setting up fp-units-gtk2-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up fpc-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up lcl-gtk2-1.8 (1.8.2+dfsg-3) ...\n", - "Setting up lcl-units-1.8 (1.8.2+dfsg-3) ...\n", - "Setting up fpc (3.0.4+dfsg-18ubuntu2) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Setting up lcl-1.8 (1.8.2+dfsg-3) ...\n", - "Setting up lazarus-1.8 (1.8.2+dfsg-3) ...\n", - "Setting up lazarus (1.8.2+dfsg-3) ...\n", - "Processing triggers for libc-bin (2.27-3ubuntu1) ...\n", - "Processing triggers for mime-support (3.60ubuntu1) ...\n", - "Processing triggers for libvlc-bin:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Processing triggers for libgdk-pixbuf2.0-0:amd64 (2.36.11-2) ...\n" - ] - } - ], - "source": [ - "!apt-get install -y fpc fpc-source lazarus git subversion zip unzip" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "A mtprocs/examples\n", - "A mtprocs/examples/parallelloop1.lpr\n", - "A mtprocs/examples/parallelloop_nested1.lpi\n", - "A mtprocs/examples/parallelloop_nested1.lpr\n", - "A mtprocs/examples/recursivemtp1.lpr\n", - "A mtprocs/examples/simplemtp1.lpr\n", - "A mtprocs/examples/parallelloop1.lpi\n", - "A mtprocs/examples/recursivemtp1.lpi\n", - "A mtprocs/examples/simplemtp1.lpi\n", - "A mtprocs/examples/testmtp1.lpi\n", - "A mtprocs/examples/testmtp1.lpr\n", - "A mtprocs/Readme.txt\n", - "A mtprocs/mtprocs.pas\n", - "A mtprocs/mtpcpu.pas\n", - "A mtprocs/multithreadprocslaz.lpk\n", - "A mtprocs/mtputils.pas\n", - "A mtprocs/multithreadprocslaz.pas\n", - "Checked out revision 7371.\n" - ] + "cells": [ + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "id": 
"LweSXQHJlq3c" + }, + "outputs": [], + "source": [ + "# This is a simple plant leaf disease classifier inspired from Data from:\n", + "# Identification of Plant Leaf Diseases Using a 9-layer Deep Convolutional Neural Network\n", + "# https://data.mendeley.com/datasets/tywbtsjrjv/1\n", + "# https://www.tensorflow.org/datasets/catalog/plant_village\n", + "\n", + "# This source code required the CAI Neural API found at:\n", + "# https://github.com/joaopauloschuler/neural-api\n", + "\n", + "# To be able to run this code, you'll need at least 32GB of RAM.\n", + "\n", + "has_plant_leaf_disease = True\n", + "has_tiny_imagenet_200 = False" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "id": "2y_lHtCNlq3h", + "outputId": "be45b99a-62a8-4843-a2ac-a0a44fda2d96", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Reading package lists... Done\n", + "Building dependency tree \n", + "Reading state information... 
Done\n", + "zip is already the newest version (3.0-11build1).\n", + "fpc is already the newest version (3.0.4+dfsg-23).\n", + "fpc-source is already the newest version (3.0.4+dfsg-23).\n", + "lazarus is already the newest version (2.0.6+dfsg-3).\n", + "git is already the newest version (1:2.25.1-1ubuntu3.11).\n", + "unzip is already the newest version (6.0-25ubuntu1.1).\n", + "subversion is already the newest version (1.13.0-3ubuntu0.2).\n", + "0 upgraded, 0 newly installed, 0 to remove and 13 not upgraded.\n" + ] + } + ], + "source": [ + "!apt-get install -y fpc fpc-source lazarus git subversion zip unzip" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "id": "rnnYbBVClq3j", + "outputId": "3be591f0-3977-4f60-9307-481b68208d41", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Checked out revision 8879.\n" + ] + } + ], + "source": [ + "!svn checkout https://svn.code.sf.net/p/lazarus-ccr/svn/components/multithreadprocs mtprocs" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "id": "5H6s7WQxlq3j", + "outputId": "6f49a4e2-0fba-47ae-df78-4af743f3f0a5", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Already up to date.\n", + "Processing /content/k\n", + " Preparing metadata (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + "Requirement already satisfied: pandas>=0.22.0 in /usr/local/lib/python3.10/dist-packages (from cai==0.1.7) (1.5.3)\n", + "Requirement already satisfied: scikit-image>=0.15.0 in /usr/local/lib/python3.10/dist-packages (from cai==0.1.7) (0.19.3)\n", + "Requirement already satisfied: opencv-python>=4.1.2.30 in /usr/local/lib/python3.10/dist-packages (from cai==0.1.7) (4.7.0.72)\n", + "Requirement already satisfied: scikit-learn>=0.21.0 in /usr/local/lib/python3.10/dist-packages (from cai==0.1.7) (1.2.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from cai==0.1.7) (1.22.4)\n", + "Requirement already satisfied: python-dateutil>=2.8.1 in /usr/local/lib/python3.10/dist-packages (from pandas>=0.22.0->cai==0.1.7) (2.8.2)\n", + "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas>=0.22.0->cai==0.1.7) (2022.7.1)\n", + "Requirement already satisfied: scipy>=1.4.1 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.15.0->cai==0.1.7) (1.10.1)\n", + "Requirement already satisfied: networkx>=2.2 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.15.0->cai==0.1.7) (3.1)\n", + "Requirement already satisfied: pillow!=7.1.0,!=7.1.1,!=8.3.0,>=6.1.0 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.15.0->cai==0.1.7) (8.4.0)\n", + "Requirement already satisfied: imageio>=2.4.1 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.15.0->cai==0.1.7) (2.25.1)\n", + "Requirement already satisfied: tifffile>=2019.7.26 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.15.0->cai==0.1.7) (2023.7.4)\n", + "Requirement already satisfied: PyWavelets>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.15.0->cai==0.1.7) (1.4.1)\n", + "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.15.0->cai==0.1.7) (23.1)\n", + "Requirement 
already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn>=0.21.0->cai==0.1.7) (1.3.1)\n", + "Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn>=0.21.0->cai==0.1.7) (3.1.0)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.1->pandas>=0.22.0->cai==0.1.7) (1.16.0)\n", + "Building wheels for collected packages: cai\n", + " Building wheel for cai (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for cai: filename=cai-0.1.7-py3-none-any.whl size=61379 sha256=d14cbd88959d2c2ff5a080ba00e2f0a4e66cfda9a46971a10fbc4d2187691fe7\n", + " Stored in directory: /tmp/pip-ephem-wheel-cache-4d_zus1u/wheels/80/61/f5/947bedc7e497038def7d1381fb65d37bd126a80e010114b8f1\n", + "Successfully built cai\n", + "Installing collected packages: cai\n", + " Attempting uninstall: cai\n", + " Found existing installation: cai 0.1.7\n", + " Uninstalling cai-0.1.7:\n", + " Successfully uninstalled cai-0.1.7\n", + "Successfully installed cai-0.1.7\n" + ] + } + ], + "source": [ + "import os\n", + "\n", + "if not os.path.isdir('k'):\n", + " !git clone https://github.com/joaopauloschuler/k-neural-api.git k\n", + "else:\n", + " !cd k && git pull\n", + "\n", + "!cd k && pip install ." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "id": "pjnqf77blq3k", + "outputId": "1318cf4f-db6f-4f15-d61f-0cd97fd085a8", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Checked out revision 1773.\n" + ] + } + ], + "source": [ + "!svn checkout https://svn.code.sf.net/p/cai/svncode/trunk/lazarus neural-api" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "id": "hZ-TbJbslq3l", + "outputId": "8d7389a2-c97c-4fab-9cd5-ef826004f438", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Hint: (lazarus) [RunTool] \"/usr/bin/fpc\" \"-iWTOTP\"\n", + "Hint: (lazarus) [RunTool] \"/usr/bin/fpc\" \"-va\" \"compilertest.pas\"\n" + ] + } + ], + "source": [ + "!lazbuild mtprocs/multithreadprocslaz.lpk" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "id": "r_8ktGAwlq3m", + "outputId": "b9baa14c-7ca2-4579-c5ea-f391c5dbf89e", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Hint: (lazarus) [RunTool] \"/usr/bin/fpc\" \"-iWTOTP\"\n", + "Hint: (lazarus) [RunTool] \"/usr/bin/fpc\" \"-va\" \"compilertest.pas\"\n", + "Hint: (lazarus) [RunTool] \"/usr/bin/fpc\" \"-iWTOTP\" \"-Px86_64\" \"-Tlinux\"\n", + "Hint: (lazarus) [RunTool] \"/usr/bin/fpc\" \"-va\" \"compilertest.pas\" \"-Px86_64\" \"-Tlinux\"\n", + "Info: (lazarus) Execute Title=\"Compile Project, Mode: Default, Target: /content/neural-api/bin/x86_64-linux/bin/SimplePlantLeafDisease\"\n", + "Info: (lazarus) Working Directory=\"/content/neural-api/examples/SimplePlantLeafDisease/\"\n", + "Info: (lazarus) Executable=\"/usr/bin/fpc\"\n", + "Info: (lazarus) Param[0]=\"-MObjFPC\"\n", + "Info: (lazarus) Param[1]=\"-Scghi\"\n", + "Info: (lazarus) Param[2]=\"-Cg\"\n", + 
"Info: (lazarus) Param[3]=\"-O3\"\n", + "Info: (lazarus) Param[4]=\"-l\"\n", + "Info: (lazarus) Param[5]=\"-vewnhibq\"\n", + "Info: (lazarus) Param[6]=\"-Fi/content/neural-api/neural\"\n", + "Info: (lazarus) Param[7]=\"-Fi/content/neural-api/bin/x86_64-linux/units\"\n", + "Info: (lazarus) Param[8]=\"-Fu/content/neural-api/neural\"\n", + "Info: (lazarus) Param[9]=\"-Fu/usr/lib/lazarus/2.0.6/lcl/units/x86_64-linux\"\n", + "Info: (lazarus) Param[10]=\"-Fu/usr/lib/lazarus/2.0.6/components/lazutils/lib/x86_64-linux\"\n", + "Info: (lazarus) Param[11]=\"-Fu/content/mtprocs/lib/x86_64-linux\"\n", + "Info: (lazarus) Param[12]=\"-Fu/usr/lib/lazarus/2.0.6/packager/units/x86_64-linux\"\n", + "Info: (lazarus) Param[13]=\"-Fu/content/neural-api/examples/SimplePlantLeafDisease/\"\n", + "Info: (lazarus) Param[14]=\"-FU/content/neural-api/bin/x86_64-linux/units/\"\n", + "Info: (lazarus) Param[15]=\"-FE/content/neural-api/bin/x86_64-linux/bin/\"\n", + "Info: (lazarus) Param[16]=\"-o/content/neural-api/bin/x86_64-linux/bin/SimplePlantLeafDisease\"\n", + "Info: (lazarus) Param[17]=\"-dUseCThreads\"\n", + "Info: (lazarus) Param[18]=\"-dAVX\"\n", + "Info: (lazarus) Param[19]=\"-dRelease\"\n", + "Info: (lazarus) Param[20]=\"SimplePlantLeafDisease.pas\"\n", + "Hint: (11030) Start of reading config file /etc/fpc.cfg\n", + "Compiling Release Version\n", + "Hint: (11031) End of reading config file /etc/fpc.cfg\n", + "Free Pascal Compiler version 3.0.4+dfsg-23 [2019/11/25] for x86_64\n", + "Copyright (c) 1993-2017 by Florian Klaempfl and others\n", + "(1002) Target OS: Linux for x86-64\n", + "(3104) Compiling SimplePlantLeafDisease.pas\n", + "/content/neural-api/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.pas(14,60) Hint: (5023) Unit \"math\" not used in SimplePlantLeafDisease\n", + "(9015) Linking /content/neural-api/bin/x86_64-linux/bin/SimplePlantLeafDisease\n", + "/usr/bin/ld.bfd: warning: /content/neural-api/bin/x86_64-linux/bin/link.res contains output sections; did you 
forget -T?\n", + "(1008) 95 lines compiled, 1.1 sec\n", + "(1022) 3 hint(s) issued\n" + ] + } + ], + "source": [ + "!lazbuild neural-api/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.lpi" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "id": "2ws6HVE7lq3o" + }, + "outputs": [], + "source": [ + "import cai.layers\n", + "import cai.datasets\n", + "import cai.models" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "id": "A5CSNeIclq3p" + }, + "outputs": [], + "source": [ + "if (has_tiny_imagenet_200):\n", + " url_zip_file=\"http://cs231n.stanford.edu/tiny-imagenet-200.zip\"\n", + " local_zip_file=\"tiny-imagenet-200.zip\"\n", + " expected_folder_name=\"download-tiny-imagenet-200\"\n", + " Verbose=True\n", + " cai.datasets.download_zip_and_extract(\n", + " url_zip_file=url_zip_file, local_zip_file=local_zip_file,\n", + " expected_folder_name=expected_folder_name, Verbose=Verbose)\n", + " if os.path.isdir('download-tiny-imagenet-200/tiny-imagenet-200'):\n", + " !mv download-tiny-imagenet-200/tiny-imagenet-200 tiny-imagenet-200" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "id": "xwsAMDJPlq3q" + }, + "outputs": [], + "source": [ + "if (has_plant_leaf_disease):\n", + " url_zip_file=\"https://data.mendeley.com/datasets/tywbtsjrjv/1/files/d5652a28-c1d8-4b76-97f3-72fb80f94efc/Plant_leaf_diseases_dataset_without_augmentation.zip?dl=1\"\n", + " local_zip_file=\"plant_leaf.zip\"\n", + " expected_folder_name=\"plant_leaf\"\n", + " Verbose=True\n", + " cai.datasets.download_zip_and_extract(\n", + " url_zip_file=url_zip_file, local_zip_file=local_zip_file,\n", + " expected_folder_name=expected_folder_name, Verbose=Verbose)\n", + " if os.path.isdir('plant_leaf/Plant_leave_diseases_dataset_without_augmentation'):\n", + " !mv plant_leaf/Plant_leave_diseases_dataset_without_augmentation plant" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": 
"P0Gb4PfFlq3r", + "outputId": "5f2a2937-cffa-44e8-ca28-6b6d0c6402b4", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "RUNNING: SimplePlantLeafDisease\n", + "Creating Neural Network...\n", + " Layers: 14\n", + " Neurons:424\n", + " Weights:229058 Sum: -9.365533\n", + "Layer 0 Neurons: 0 Weights: 0 TNNetInput(96,96,3,0,0) Output:96,96,3 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Bias Sum: 0.0000 Branches:1\n", + "Layer 1 Neurons: 64 Weights: 4800 TNNetConvolutionLinear(64,5,4,2,0) Output:50,50,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.8826 Bias Sum: 0.0000 Parent:0 Branches:1\n", + "Layer 2 Neurons: 0 Weights: 0 TNNetMaxPool(2,2,0,0,0) Output:25,25,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Bias Sum: 0.0000 Parent:1 Branches:1\n", + "Layer 3 Neurons: 1 Weights: 2 TNNetMovingStdNormalization(0,0,0,0,0) Output:25,25,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 1.0000 Bias Sum: 0.0000 Parent:2 Branches:1\n", + "Layer 4 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:25,25,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: -1.1006 Bias Sum: 0.0000 Parent:3 Branches:1\n", + "Layer 5 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:25,25,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: -6.1782 Bias Sum: 0.0000 Parent:4 Branches:1\n", + "Layer 6 Neurons: 0 Weights: 0 TNNetMaxPool(2,2,0,0,0) Output:13,13,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Bias Sum: 0.0000 Parent:5 Branches:1\n", + "Layer 7 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:13,13,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.6385 Bias Sum: 0.0000 Parent:6 Branches:1\n", + "Layer 8 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:13,13,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: -7.9876 Bias Sum: 0.0000 Parent:7 Branches:1\n", + "Layer 9 Neurons: 64 Weights: 36864 
TNNetConvolutionReLU(64,3,1,2,0) Output:7,7,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 3.1658 Bias Sum: 0.0000 Parent:8 Branches:1\n", + "Layer 10 Neurons: 0 Weights: 0 TNNetDropout(2,1,0,0,0) Output:7,7,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Bias Sum: 0.0000 Parent:9 Branches:1\n", + "Layer 11 Neurons: 0 Weights: 0 TNNetMaxPool(2,2,0,0,0) Output:4,4,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Bias Sum: 0.0000 Parent:10 Branches:1\n", + "Layer 12 Neurons: 39 Weights: 39936 TNNetFullConnectLinear(39,1,1,0,0) Output:39,1,1 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.2140 Bias Sum: 0.0000 Parent:11 Branches:1\n", + "Layer 13 Neurons: 0 Weights: 0 TNNetSoftMax(0,0,0,0,0) Output:39,1,1 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Bias Sum: 0.0000 Parent:12 Branches:0\n", + "Loading 100% of the Plant leave disease dataset into memory.\n", + "Training Images:49904 Validation Images:2775 Test Images:2775\n", + "File name is: SimplePlantLeafDisease\n", + "Learning rate:0.001000 L2 decay:0.000010 Inertia:0.900000 Batch size:64 Step size:64 Staircase ephocs:10 Min backprop error:0.20\n", + "Training images: 49904\n", + "Validation images: 2775\n", + "Test images: 2775\n", + "Computing...\n", + "640 Examples seen. Accuracy: 0.0175 Error: 1.93886 Loss: 3.54775 Threads: 4 Forward time: 4.94s Backward time: 4.41s Step time: 7.28s\n", + "1280 Examples seen. Accuracy: 0.0305 Error: 1.89895 Loss: 3.45915 Threads: 4 Forward time: 4.24s Backward time: 4.04s Step time: 7.51s\n", + "1920 Examples seen. Accuracy: 0.0424 Error: 1.82177 Loss: 3.06223 Threads: 4 Forward time: 4.78s Backward time: 4.41s Step time: 7.66s\n", + "2560 Examples seen. Accuracy: 0.0562 Error: 1.80944 Loss: 3.07066 Threads: 4 Forward time: 4.22s Backward time: 3.97s Step time: 7.16s\n", + "3200 Examples seen. Accuracy: 0.0674 Error: 1.77911 Loss: 2.94995 Threads: 4 Forward time: 4.23s Backward time: 3.92s Step time: 7.74s\n", + "3840 Examples seen. 
Accuracy: 0.0759 Error: 1.89523 Loss: 3.21194 Threads: 4 Forward time: 4.23s Backward time: 3.99s Step time: 7.09s\n", + "4480 Examples seen. Accuracy: 0.0911 Error: 1.84356 Loss: 2.92096 Threads: 4 Forward time: 4.29s Backward time: 3.99s Step time: 8.08s\n", + "5120 Examples seen. Accuracy: 0.1101 Error: 1.52856 Loss: 2.55728 Threads: 4 Forward time: 4.26s Backward time: 3.74s Step time: 7.12s\n", + "5760 Examples seen. Accuracy: 0.1262 Error: 1.60777 Loss: 2.75767 Threads: 4 Forward time: 4.30s Backward time: 3.83s Step time: 7.90s\n", + "6400 Examples seen. Accuracy: 0.1442 Error: 1.52103 Loss: 2.34901 Threads: 4 Forward time: 4.61s Backward time: 4.11s Step time: 7.03s\n", + "7040 Examples seen. Accuracy: 0.1607 Error: 1.55067 Loss: 2.34624 Threads: 4 Forward time: 4.20s Backward time: 3.78s Step time: 7.61s\n", + "7680 Examples seen. Accuracy: 0.1783 Error: 1.62708 Loss: 2.67945 Threads: 4 Forward time: 5.26s Backward time: 4.57s Step time: 7.36s\n", + "8320 Examples seen. Accuracy: 0.1947 Error: 1.55733 Loss: 2.34232 Threads: 4 Forward time: 4.27s Backward time: 3.74s Step time: 7.20s\n", + "8960 Examples seen. Accuracy: 0.2128 Error: 1.52980 Loss: 2.22569 Threads: 4 Forward time: 5.05s Backward time: 4.53s Step time: 7.69s\n", + "9600 Examples seen. Accuracy: 0.2286 Error: 1.53133 Loss: 2.16799 Threads: 4 Forward time: 4.30s Backward time: 3.94s Step time: 6.93s\n", + "10240 Examples seen. Accuracy: 0.2365 Error: 1.48934 Loss: 2.34372 Threads: 4 Forward time: 4.20s Backward time: 3.66s Step time: 7.76s\n", + "10880 Examples seen. Accuracy: 0.2477 Error: 1.60093 Loss: 2.29594 Threads: 4 Forward time: 4.17s Backward time: 3.54s Step time: 6.84s\n", + "11520 Examples seen. Accuracy: 0.2636 Error: 1.35666 Loss: 1.86014 Threads: 4 Forward time: 4.24s Backward time: 3.71s Step time: 7.45s\n", + "12160 Examples seen. Accuracy: 0.2771 Error: 1.40298 Loss: 1.80918 Threads: 4 Forward time: 4.18s Backward time: 3.44s Step time: 6.72s\n", + "12800 Examples seen. 
Accuracy: 0.2899 Error: 1.47205 Loss: 2.23363 Threads: 4 Forward time: 5.01s Backward time: 4.41s Step time: 7.84s\n", + "13440 Examples seen. Accuracy: 0.2992 Error: 1.54169 Loss: 2.44985 Threads: 4 Forward time: 4.21s Backward time: 3.60s Step time: 7.23s\n" + ] + } + ], + "source": [ + "if os.path.isdir('plant'):\n", + " print(\"RUNNING: SimplePlantLeafDisease\")\n", + " !neural-api/bin/x86_64-linux/bin/SimplePlantLeafDisease" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "F1X-Ad6blq3r" + }, + "outputs": [], + "source": [] } - ], - "source": [ - "!svn checkout https://svn.code.sf.net/p/lazarus-ccr/svn/components/multithreadprocs mtprocs" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Cloning into 'k'...\n", - "remote: Enumerating objects: 240, done.\u001b[K\n", - "remote: Counting objects: 100% (240/240), done.\u001b[K\n", - "remote: Compressing objects: 100% (169/169), done.\u001b[K\n", - "remote: Total 240 (delta 148), reused 129 (delta 63), pack-reused 0\u001b[K\n", - "Receiving objects: 100% (240/240), 188.31 KiB | 2.73 MiB/s, done.\n", - "Resolving deltas: 100% (148/148), done.\n", - "Processing /tf/k\n", - "Collecting Keras>=2.2.5\n", - " Downloading Keras-2.3.1-py2.py3-none-any.whl (377 kB)\n", - "\u001b[K |████████████████████████████████| 377 kB 2.5 MB/s eta 0:00:01\n", - "\u001b[?25hCollecting pandas>=0.22.0\n", - " Downloading pandas-1.0.3-cp36-cp36m-manylinux1_x86_64.whl (10.0 MB)\n", - "\u001b[K |████████████████████████████████| 10.0 MB 9.5 MB/s eta 0:00:01\n", - "\u001b[?25hCollecting scikit-image>=0.15.0\n", - " Downloading scikit_image-0.16.2-cp36-cp36m-manylinux1_x86_64.whl (26.5 MB)\n", - "\u001b[K |████████████████████████████████| 26.5 MB 20.3 MB/s eta 0:00:01\n", - "\u001b[?25hCollecting opencv-python>=4.1.2.30\n", - " Downloading opencv_python-4.2.0.34-cp36-cp36m-manylinux1_x86_64.whl 
(28.2 MB)\n", - "\u001b[K |████████████████████████████████| 28.2 MB 18.9 MB/s eta 0:00:01 |███▏ | 2.8 MB 18.9 MB/s eta 0:00:02\n", - "\u001b[?25hCollecting scikit-learn>=0.21.0numpy\n", - " Downloading scikit_learn-0.22.2.post1-cp36-cp36m-manylinux1_x86_64.whl (7.1 MB)\n", - "\u001b[K |████████████████████████████████| 7.1 MB 21.6 MB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: numpy>=1.9.1 in /usr/local/lib/python3.6/dist-packages (from Keras>=2.2.5->cai==0.0.7) (1.18.2)\n", - "Collecting pyyaml\n", - " Downloading PyYAML-5.3.1.tar.gz (269 kB)\n", - "\u001b[K |████████████████████████████████| 269 kB 29.9 MB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: scipy>=0.14 in /usr/local/lib/python3.6/dist-packages (from Keras>=2.2.5->cai==0.0.7) (1.4.1)\n", - "Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.6/dist-packages (from Keras>=2.2.5->cai==0.0.7) (1.14.0)\n", - "Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from Keras>=2.2.5->cai==0.0.7) (2.10.0)\n", - "Requirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from Keras>=2.2.5->cai==0.0.7) (1.1.0)\n", - "Collecting keras-applications>=1.0.6\n", - " Downloading Keras_Applications-1.0.8-py3-none-any.whl (50 kB)\n", - "\u001b[K |████████████████████████████████| 50 kB 6.0 MB/s eta 0:00:01\n", - "\u001b[?25hCollecting pytz>=2017.2\n", - " Downloading pytz-2019.3-py2.py3-none-any.whl (509 kB)\n", - "\u001b[K |████████████████████████████████| 509 kB 19.7 MB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: python-dateutil>=2.6.1 in /usr/local/lib/python3.6/dist-packages (from pandas>=0.22.0->cai==0.0.7) (2.8.1)\n", - "Collecting pillow>=4.3.0\n", - " Downloading Pillow-7.1.1-cp36-cp36m-manylinux1_x86_64.whl (2.1 MB)\n", - "\u001b[K |████████████████████████████████| 2.1 MB 18.9 MB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: matplotlib!=3.0.0,>=2.0.0 in 
/usr/local/lib/python3.6/dist-packages (from scikit-image>=0.15.0->cai==0.0.7) (3.2.1)\n", - "Collecting PyWavelets>=0.4.0\n", - " Downloading PyWavelets-1.1.1-cp36-cp36m-manylinux1_x86_64.whl (4.4 MB)\n", - "\u001b[K |████████████████████████████████| 4.4 MB 21.5 MB/s eta 0:00:01 |▏ | 20 kB 17.4 MB/s eta 0:00:01\n", - "\u001b[?25hCollecting networkx>=2.0\n", - " Downloading networkx-2.4-py3-none-any.whl (1.6 MB)\n", - "\u001b[K |████████████████████████████████| 1.6 MB 21.3 MB/s eta 0:00:01\n", - "\u001b[?25hCollecting imageio>=2.3.0\n", - " Downloading imageio-2.8.0-py3-none-any.whl (3.3 MB)\n", - "\u001b[K |████████████████████████████████| 3.3 MB 11.3 MB/s eta 0:00:01\n", - "\u001b[?25hCollecting joblib>=0.11\n", - " Downloading joblib-0.14.1-py2.py3-none-any.whl (294 kB)\n", - "\u001b[K |████████████████████████████████| 294 kB 28.3 MB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib!=3.0.0,>=2.0.0->scikit-image>=0.15.0->cai==0.0.7) (2.4.6)\n", - "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib!=3.0.0,>=2.0.0->scikit-image>=0.15.0->cai==0.0.7) (1.1.0)\n", - "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.6/dist-packages (from matplotlib!=3.0.0,>=2.0.0->scikit-image>=0.15.0->cai==0.0.7) (0.10.0)\n", - "Requirement already satisfied: decorator>=4.3.0 in /usr/local/lib/python3.6/dist-packages (from networkx>=2.0->scikit-image>=0.15.0->cai==0.0.7) (4.4.2)\n", - "Requirement already satisfied: setuptools in /usr/local/lib/python3.6/dist-packages (from kiwisolver>=1.0.1->matplotlib!=3.0.0,>=2.0.0->scikit-image>=0.15.0->cai==0.0.7) (46.0.0)\n", - "Building wheels for collected packages: cai, pyyaml\n", - " Building wheel for cai (setup.py) ... 
\u001b[?25ldone\n", - "\u001b[?25h Created wheel for cai: filename=cai-0.0.7-py3-none-any.whl size=16210 sha256=7fb10e66ba3a06bf428518b1c3d5cd01d63c10beff6e84ceedbff808c1f491e2\n", - " Stored in directory: /tmp/pip-ephem-wheel-cache-32wxu25a/wheels/f0/08/19/56f64e8c8cc45b0390e5e7e2f634c4c1aa0212065044fb6442\n", - " Building wheel for pyyaml (setup.py) ... \u001b[?25ldone\n", - "\u001b[?25h Created wheel for pyyaml: filename=PyYAML-5.3.1-cp36-cp36m-linux_x86_64.whl size=45919 sha256=a0d3189b4eaed85e19ae9d76693a74dd9f833c1af09a63c9c40a3760c45127e7\n", - " Stored in directory: /root/.cache/pip/wheels/e5/9d/ad/2ee53cf262cba1ffd8afe1487eef788ea3f260b7e6232a80fc\n", - "Successfully built cai pyyaml\n", - "Installing collected packages: pyyaml, keras-applications, Keras, pytz, pandas, pillow, PyWavelets, networkx, imageio, scikit-image, opencv-python, joblib, scikit-learn, cai\n", - "Successfully installed Keras-2.3.1 PyWavelets-1.1.1 cai-0.0.7 imageio-2.8.0 joblib-0.14.1 keras-applications-1.0.8 networkx-2.4 opencv-python-4.2.0.34 pandas-1.0.3 pillow-7.1.1 pytz-2019.3 pyyaml-5.3.1 scikit-image-0.16.2 scikit-learn-0.22.2.post1\n" - ] + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.9" + }, + "colab": { + "provenance": [], + "machine_shape": "hm" } - ], - "source": [ - "import os\n", - "\n", - "if not os.path.isdir('k'):\n", - " !git clone https://github.com/joaopauloschuler/k-neural-api.git k\n", - "else:\n", - " !cd k && git pull\n", - "\n", - "!cd k && pip install ." 
- ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "A neural-api/neural\n", - "A neural-api/examples\n", - "A neural-api/examples/SuperResolution\n", - "A neural-api/examples/SimplePlantLeafDisease\n", - "A neural-api/examples/SimpleTinyImageNet\n", - "A neural-api/examples/CaiOptimizedDenseNet\n", - "A neural-api/examples/CaiOptimizedDenseNet/results\n", - "A neural-api/examples/SimpleImageClassifier\n", - "A neural-api/examples/SimpleImageClassifier/results\n", - "A neural-api/examples/SimpleTinyImageNet/SimpleTinyImageNet.pas\n", - "A neural-api/examples/SimpleImageClassifier/SimpleImageClassifierResize64.lpr\n", - "A neural-api/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet.lpr\n", - "A neural-api/neural/neuraldatasets.pas\n", - "A neural-api/examples/SimpleFashionMNIST\n", - "A neural-api/examples/SimpleFashionMNIST/results\n", - "A neural-api/examples/VisualGANTinyImagenet\n", - "A neural-api/examples/SimpleImageClassifier/SimpleImageClassifierResize48.lpr\n", - "A neural-api/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet48.lpr\n", - "A neural-api/examples/SimpleImageClassifier/results/SimpleImageClassifier20191012.csv\n", - "A neural-api/neural/neuraldatasetsv.pas\n", - "A neural-api/neural/neuralvolumev.pas\n", - "A neural-api/examples/CaiOptimizedDenseNet/results/CaiOptimizedDenseNet20191018.csv\n", - "A neural-api/examples/SimpleImageClassifier/SimpleImageClassifierResize64.lpi\n", - "A neural-api/examples/SimpleImageClassifier/SimpleImageClassifierResize48.lpi\n", - "A neural-api/examples/SimpleImageClassifier/SimpleImageClassifier.lpi\n", - "A neural-api/examples/SimpleFashionMNIST/results/SimpleFashionMNIST20191018.csv\n", - "A neural-api/examples/GradientAscent\n", - "A neural-api/examples/SimpleImageClassifier/results/SimpleImageClassifier20191102.csv\n", - "A neural-api/examples/SimpleImageClassifier/SimpleImageClassifierCPU.ipynb\n", - 
"A neural-api/examples/SimpleFashionMNIST/SimpleFashionMNIST.lpi\n", - "A neural-api/examples/VisualGANTinyImagenet/uvisualgantinyimagenet.lfm\n", - "A neural-api/examples/VisualGANTinyImagenet/VisualGANTinyImagenet.lpr\n", - "A neural-api/examples/VisualGAN\n", - "A neural-api/neural/neuralvolume.pas\n", - "A neural-api/neural/neuralfit.pas\n", - "A neural-api/examples/GradientAscent/ugradientascent.pas\n", - "A neural-api/examples/GradientAscent/GradientAscent.lpi\n", - "A neural-api/examples/VisualGAN/uvisualgan.lfm\n", - "A neural-api/neural/neuralnetwork.pas\n", - "A neural-api/neural/neuralthread.pas\n", - "A neural-api/examples/ViewInnerPatterns\n", - "A neural-api/neural/neuralopencl.pas\n", - "A neural-api/neural/neuralbit.pas\n", - "A neural-api/neural/neuralnetwork.inc\n", - "A neural-api/examples/VisualGAN/VisualGAN.lpr\n", - "A neural-api/neural/neuralevolutionary.pas\n", - "A neural-api/examples/ImageClassifierSELU\n", - "A neural-api/examples/ImageClassifierSELU/results\n", - "A neural-api/examples/VisualGANTinyImagenet/VisualGANTinyImagenet.ico\n", - "A neural-api/neural/neuralopenclv.pas\n", - "A neural-api/neural/neuralbyteprediction.pas\n", - "A neural-api/neural/neuralab.pas\n", - "A neural-api/examples/DenseNetBCL40\n", - "A neural-api/examples/DenseNetBCL40/results\n", - "A neural-api/examples/VisualGANTinyImagenet/VisualGANTinyImagenet.res\n", - "A neural-api/examples/GradientAscent/GradientAscent.lpr\n", - "A neural-api/neural/neuralgeneric.pas\n", - "A neural-api/examples/ViewInnerPatterns/uviewinnerpatterns.lfm\n", - "A neural-api/examples/GradientAscent/GradientAscent.res\n", - "A neural-api/examples/DenseNetFashionMNIST\n", - "A neural-api/examples/DenseNetFashionMNIST/results\n", - "A neural-api/neural/neuralplanbuilder.pas\n", - "A neural-api/examples/ViewInnerPatterns/ViewInnerPatterns.lpr\n", - "A neural-api/examples/VisualGAN/VisualGAN.lpi\n", - "A neural-api/neural/readme.txt\n", - "A 
neural-api/examples/ImageClassifierSELU/results/ImageClassifierSELU20191102.csv\n", - "A neural-api/examples/Cifar100CaiDenseNet\n", - "A neural-api/examples/Cifar100CaiDenseNet/results\n", - "A neural-api/examples/SimpleMNist\n", - "A neural-api/examples/VisualGAN/VisualGAN.res\n", - "A neural-api/examples/ViewInnerPatterns/ViewInnerPatterns.ico\n", - "A neural-api/examples/ImageClassifierSELU/ImageClassifierSELU.lpi\n", - "A neural-api/neural/neural.cl\n", - "A neural-api/examples/SimpleMNist/results\n", - "A neural-api/examples/IdentityShortcutConnection\n", - "A neural-api/examples/DenseNetBCL40/results/DenseNetBLC40-20191024.csv\n", - "A neural-api/examples/ViewInnerPatterns/ViewInnerPatterns.res\n", - "A neural-api/neural/neuralabfun.pas\n", - "A neural-api/neural/neuralasm.inc\n", - "A neural-api/examples/SeparableConvolution\n", - "A neural-api/examples/SimpleImageClassifierGPU\n", - "A neural-api/examples/ImageClassifierSELU/ImageClassifierSELU.lpr\n", - "A neural-api/examples/DenseNetFashionMNIST/results/DenseNetFashionMNIST-20191025.csv\n", - "A neural-api/neural/neuralcache.pas\n", - "A neural-api/examples/SuperResolution/SuperResolutionTrain.lpi\n", - "A neural-api/examples/DenseNetBCL40/results/DenseNetBLC40-SELU.csv\n", - "A neural-api/examples/DenseNetBCL40/DenseNetBCL40.lpr\n", - "A neural-api/examples/XorAndOr\n", - "A neural-api/experiments\n", - "A neural-api/experiments/IncreaseResolution\n", - "A neural-api/examples/SuperResolution/SuperResolutionTrain.lpr\n", - "A neural-api/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.lpi\n", - "A neural-api/examples/DenseNetFashionMNIST/DenseNetFashionMNIST.lpr\n", - "A neural-api/examples/Cifar100CaiDenseNet/Cifar100CaiDenseNet.lpr\n", - "A neural-api/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.pas\n", - "A neural-api/examples/Cifar100CaiDenseNet/results/Cifar100CaiDenseNet20191020.csv\n", - "A neural-api/examples/SimpleTinyImageNet/SimpleTinyImageNet.lpi\n", - "A 
neural-api/examples/SimpleMNist/SimpleMNist.lpr\n", - "A neural-api/examples/IdentityShortcutConnection/IdentityShortcutConnection.lpi\n", - "A neural-api/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet48.lpi\n", - "A neural-api/examples/SimpleMNist/results/SimpleMNist20191014.csv\n", - "A neural-api/examples/SimpleMNist/results/SimpleMNist20191014.png\n", - "A neural-api/examples/SeparableConvolution/SeparableConvolution.lpi\n", - "A neural-api/examples/IdentityShortcutConnection/IdentityShortcutConnection.ipynb\n", - "A neural-api/examples/IdentityShortcutConnection/IdentityShortcutConnection.pas\n", - "A neural-api/experiments/tinyImageNet200\n", - "A neural-api/examples/XorAndOr/XorAndOr.lpi\n", - "A neural-api/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet.ipynb\n", - "A neural-api/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet.lpi\n", - "A neural-api/examples/SimpleImageClassifierGPU/SimpleImageClassifierGPU.lpr\n", - "A neural-api/examples/SimpleImageClassifierGPU/SimpleImageClassifierGPU.ipynb\n", - "A neural-api/examples/XorAndOr/XorAndOr.lpr\n", - "A neural-api/experiments/visualCifar10BatchUpdate\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionApp.lpi\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionApp.lpr\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionApp.res\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionLearning.res\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionLearning.lpr\n", - "A neural-api/experiments/supersimple\n", - "A neural-api/experiments/testcnnalgo\n", - "A neural-api/experiments/IncreaseResolution/uincreaseresolutionlearn.lfm\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionLearning.ico\n", - "A neural-api/experiments/IncreaseResolution/uincreaseresolutionapp.lfm\n", - "A neural-api/experiments/IncreaseResolution/uincreaseresolutionlearn.pas\n", - "A 
neural-api/examples/SimpleImageClassifier/SimpleImageClassifierCPUResize48.ipynb\n", - "A neural-api/examples/SimpleImageClassifier/SimpleImageClassifier.lpr\n", - "A neural-api/examples/SimpleImageClassifier/results/SimpleImageClassifier20191111.csv\n", - "A neural-api/examples/SimpleFashionMNIST/SimpleFashionMNIST.lpr\n", - "A neural-api/experiments/visualCifar10AnimalMachine\n", - "A neural-api/examples/VisualGANTinyImagenet/uvisualgantinyimagenet.pas\n", - "A neural-api/experiments/IncreaseResolution/uresizeutil.pas\n", - "A neural-api/experiments/visualCifar10OpenCL\n", - "A neural-api/experiments/tinyImageNet200/tinyImageNet200Update.lpi\n", - "A neural-api/examples/VisualGANTinyImagenet/VisualGANTinyImagenet.lpi\n", - "A neural-api/experiments/tinyImageNet200/utinyimagenet200.pas\n", - "A neural-api/examples/GradientAscent/ugradientascent.lfm\n", - "A neural-api/experiments/visualCifar10BatchUpdate/visualCifar10learningBatchUpdate.lpi\n", - "A neural-api/examples/GradientAscent/GradientAscent.ico\n", - "A neural-api/experiments/visualCifar10learning\n", - "A neural-api/examples/VisualGAN/uvisualgan.pas\n", - "A neural-api/experiments/tinyImageNet200/tinyImageNet200Update.res\n", - "A neural-api/experiments/visualCifar10BatchUpdate/visualCifar10learningBatchUpdate.lpr\n", - "A neural-api/examples/VisualGAN/VisualGAN.ico\n", - "A neural-api/experiments/tinyImageNet200/tinyImageNet200Update.lpr\n", - "A neural-api/experiments/visualCifar10learning2\n", - "A neural-api/examples/ViewInnerPatterns/uviewinnerpatterns.pas\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "A neural-api/examples/ViewInnerPatterns/ViewInnerPatterns.lpi\n", - "A neural-api/examples/ImageClassifierSELU/results/ImageClassifierSELU20191109.csv\n", - "A neural-api/experiments/visualCifar10BatchUpdate/visualCifar10learningBatchUpdate.ico\n", - "A neural-api/experiments/testcnnalgo/testcnnalgo.lpr\n", - "A neural-api/experiments/visualCifar10test\n", - "A 
neural-api/examples/ImageClassifierSELU/ImageClassifierSELU.ipynb\n", - "A neural-api/experiments/visualCifar10AnimalMachine/visualCifar10AnimalMachine.res\n", - "A neural-api/examples/DenseNetBCL40/results/DenseNetBLC40-I0.5-20191028.csv\n", - "A neural-api/experiments/visualCifar10BatchUpdate/uvisualcifar10learningbatchupdate.lfm\n", - "A neural-api/experiments/visualCifar10OpenCL/visualCifar10learningOpenCL.lpi\n", - "A neural-api/examples/DenseNetBCL40/DenseNetBCL40.lpi\n", - "A neural-api/experiments/3dCellularAutomata\n", - "A neural-api/experiments/testcnnalgo/testcnnalgo.lpi\n", - "A neural-api/examples/DenseNetFashionMNIST/DenseNetFashionMNIST.lpi\n", - "A neural-api/experiments/visualCifar10AnimalMachine/visualCifar10AnimalMachine.lpi\n", - "A neural-api/examples/Cifar100CaiDenseNet/Cifar100CaiDenseNet.lpi\n", - "A neural-api/examples/SimpleMNist/SimpleMNist.lpi\n", - "A neural-api/experiments/ConwayGameOfLife\n", - "A neural-api/examples/SeparableConvolution/SeparableConvolution.lpr\n", - "A neural-api/experiments/visualCifar10OpenCL/uvisualcifar10learningopencl.pas\n", - "A neural-api/examples/SimpleImageClassifierGPU/SimpleImageClassifierGPU.lpi\n", - "A neural-api/experiments/visualCifar10BatchUpdate/visualCifar10learningBatchUpdate.res\n", - "A neural-api/experiments/supersimple/supersimple.lpr\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionApp.ico\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionLearning.lpi\n", - "A neural-api/experiments/visualCifar10AnimalMachine/uvisualcifar10animalmachine.pas\n", - "A neural-api/experiments/visualCifar10AnimalMachine/visualCifar10AnimalMachine.lpr\n", - "A neural-api/experiments/IncreaseResolution/uincreaseresolutionapp.pas\n", - "A neural-api/experiments/visualCifar10OpenCL/uvisualcifar10learningopencl.lfm\n", - "A neural-api/experiments/tinyImageNet200/utinyimagenet200.lfm\n", - "A neural-api/experiments/LifeAppearance\n", - "A 
neural-api/experiments/visualCifar10learning/uvisualcifar10learning.pas\n", - "A neural-api/experiments/visualCifar10learning/visualCifar10learning.lpr\n", - "A neural-api/experiments/tinyImageNet200/tinyImageNet200Update.ico\n", - "A neural-api/experiments/visualCifar10learning2/visualCifar10learning2.res\n", - "A neural-api/experiments/visualCifar10OpenCL/visualCifar10learningOpenCL.lpr\n", - "A neural-api/experiments/visualCifar10BatchUpdate/uvisualcifar10learningbatchupdate.pas\n", - "A neural-api/experiments/MagicSquare\n", - "A neural-api/experiments/visualCifar10OpenCL/visualCifar10learningOpenCL.ico\n", - "A neural-api/experiments/visualCifar10learning/visualCifar10learning.lpi\n", - "A neural-api/experiments/visualCifar10learning/visualCifar10learning.res\n", - "A neural-api/experiments/supersimple/supersimple.lpi\n", - "A neural-api/experiments/visualCifar10learning2/uvisualcifar10learning.pas\n", - "A neural-api/experiments/visualCifar10test/uvisualcifar10test.pas\n", - "A neural-api/experiments/visualCifar10AnimalMachine/uvisualcifar10animalmachine.lfm\n", - "A neural-api/experiments/visualCifar10learning2/uvisualcifar10learning.lfm\n", - "A neural-api/experiments/visualCifar10test/visualCifar10test.lpr\n", - "A neural-api/experiments/visualCifar10AnimalMachine/visualCifar10AnimalMachine.ico\n", - "A neural-api/experiments/visualCifar10learning2/visualCifar10learning2.lpr\n", - "A neural-api/experiments/visualCifar10OpenCL/visualCifar10learningOpenCL.res\n", - "A neural-api/experiments/NeuralWebServer\n", - "A neural-api/experiments/3dCellularAutomata/CellularAutomata3d.res\n", - "A neural-api/experiments/visualCifar10learning2/visualCifar10learning2.ico\n", - "A neural-api/experiments/ConwayGameOfLife/JHC_GameOfLife.lpi\n", - "A neural-api/experiments/visualCifar10test/visualCifar10test.lpi\n", - "A neural-api/experiments/visualCifar10learning/uvisualcifar10learning.lfm\n", - "A neural-api/experiments/NineMensMorris\n", - "A 
neural-api/experiments/visualCifar10test/visualCifar10test.res\n", - "A neural-api/experiments/3dCellularAutomata/CellularAutomata3d.ico\n", - "A neural-api/experiments/3dCellularAutomata/CellularAutomata3d.lpi\n", - "A neural-api/experiments/visualCifar10learning/visualCifar10learning.ico\n", - "A neural-api/experiments/visualCifar10learning2/visualCifar10learning2.lpi\n", - "A neural-api/experiments/ConwayGameOfLife/UNIT1.DFM\n", - "A neural-api/experiments/3dCellularAutomata/Unit1.lfm\n", - "A neural-api/experiments/ConwayGameOfLife/JHC_GameOfLife.lpr\n", - "A neural-api/experiments/3dCellularAutomata/Unit1.pas\n", - "A neural-api/experiments/ConwayGameOfLife/lifeai.lfm\n", - "A neural-api/experiments/visualCifar10test/uvisualcifar10test.lfm\n", - "A neural-api/experiments/ConwayGameOfLife/JHC_GameOfLife.res\n", - "A neural-api/experiments/visualCifar10test/visualCifar10test.ico\n", - "A neural-api/experiments/3dCellularAutomata/CellularAutomata3d.lpr\n", - "A neural-api/experiments/ConwayGameOfLife/JHC_GameOfLife.ico\n", - "A neural-api/experiments/ConwayGameOfLife/UNIT1.PAS\n", - "A neural-api/experiments/ConwayGameOfLife/lifeai.pas\n", - "A neural-api/experiments/LifeAppearance/LifeAppearance.res\n", - "A neural-api/experiments/LifeAppearance/LifeAppearance.lpr\n", - "A neural-api/experiments/MagicSquare/MAGIC.ICO\n", - "A neural-api/experiments/ConwayGameOfLife/about.lfm\n", - "A neural-api/experiments/LifeAppearance/LifeAppearance.ico\n", - "A neural-api/experiments/SOM-NeuralNetwork\n", - "A neural-api/experiments/ConwayGameOfLife/LIFE2.ico\n", - "A neural-api/experiments/ConwayGameOfLife/about.pas\n", - "A neural-api/experiments/MagicSquare/MagicSquare.ico\n", - "A neural-api/experiments/MagicSquare/MagicSquare.res\n", - "A neural-api/experiments/mining\n", - "A neural-api/experiments/LifeAppearance/LifeAppearance.lpi\n", - "A neural-api/experiments/MagicSquare/UDARR.PAS\n", - "A neural-api/experiments/LifeAppearance/Unit1.pas\n", - "A 
neural-api/experiments/LifeAppearance/Unit1.lfm\n", - "A neural-api/experiments/MagicSquare/MagicSquare.lpi\n", - "A neural-api/experiments/MagicSquare/MagicSquare.lpr\n", - "A neural-api/experiments/MagicSquare/magic2.ico\n", - "A neural-api/experiments/MagicSquare/quada6g.lfm\n", - "A neural-api/experiments/MagicSquare/uabout.dfm\n", - "A neural-api/experiments/MagicSquare/quada6g.pas\n", - "A neural-api/experiments/NeuralWebServer/README.txt\n", - "A neural-api/experiments/NeuralWebServer/SimpleNeuralWebServer.lpr\n", - "A neural-api/experiments/supersimplecorrelation\n", - "A neural-api/experiments/supersimplehyperbolictangent\n", - "A neural-api/experiments/MagicSquare/uabout.pas\n", - "A neural-api/experiments/NeuralWebServer/SimpleNeuralWebServer.lpi\n", - "A neural-api/experiments/NeuralWebServer/SimpleNeuralWebServer.ico\n", - "A neural-api/experiments/NeuralWebServer/usimpleneuralwebserver.lfm\n", - "A neural-api/experiments/NeuralWebServer/usimpleneuralwebserver.pas\n", - "A neural-api/experiments/visualCifar10MT\n", - "A neural-api/experiments/NineMensMorris/MOINHO.lpr\n", - "A neural-api/experiments/NineMensMorris/MOINHO.ico\n", - "A neural-api/experiments/NineMensMorris/MOINHO.res\n", - "A neural-api/experiments/NineMensMorris/UAUX.pas\n", - "A neural-api/experiments/NineMensMorris/UVence.pas\n", - "A neural-api/experiments/visualCifar10NTL\n", - "A neural-api/experiments/NineMensMorris/TAB1.BMP\n", - "A neural-api/experiments/NineMensMorris/UIA.PAS\n", - "A neural-api/experiments/NineMensMorris/UMOINHO.PAS\n", - "A neural-api/experiments/NineMensMorris/princ.pas\n", - "A neural-api/experiments/NineMensMorris/dialog.lfm\n", - "A neural-api/docs\n", - "A neural-api/libs\n", - "A neural-api/experiments/NineMensMorris/dialog.pas\n", - "A neural-api/experiments/SOM-NeuralNetwork/URSOM.PAS\n", - "A neural-api/experiments/SOM-NeuralNetwork/PSOM1.lpi\n", - "A neural-api/experiments/SOM-NeuralNetwork/PSOM1.lpr\n", - "A 
neural-api/experiments/mining/PMinera.lpi\n", - "A neural-api/experiments/SOM-NeuralNetwork/USOM1.lfm\n", - "A neural-api/experiments/SOM-NeuralNetwork/USOM1.pas\n", - "A neural-api/experiments/mining/UFRob1.pas\n", - "A neural-api/experiments/mining/PMinera.lpr\n", - "A neural-api/experiments/mining/PMinera.res\n", - "A neural-api/experiments/mining/UForOptMin.pas\n", - "A neural-api/experiments/mining/UForOptMin.lfm\n", - "A neural-api/experiments/mining/UVPlan.lfm\n", - "A neural-api/experiments/mining/URobMin2.pas\n", - "A neural-api/experiments/mining/UVPlan.pas\n", - "A neural-api/experiments/supersimplehyperbolictangent/supersimplehyperbolictangent.lpi\n", - "A neural-api/experiments/supersimplehyperbolictangent/supersimplehyperbolictangent.lpr\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "A neural-api/experiments/supersimplecorrelation/supersimplecorrelation.lpi\n", - "A neural-api/experiments/visualCifar10MT/uvisualcifar10learningmt.lfm\n", - "A neural-api/experiments/visualCifar10MT/uvisualcifar10learningmt.pas\n", - "A neural-api/experiments/visualCifar10NTL/uvisualcifar10learningmt.lfm\n", - "A neural-api/experiments/visualCifar10MT/visualCifar10learningMT.res\n", - "A neural-api/experiments/visualCifar10MT/visualCifar10learningMT.ico\n", - "A neural-api/experiments/visualCifar10NTL/uvisualcifar10learningmt.pas\n", - "A neural-api/experiments/visualCifar10NTL/visualCifar10learningMT2.lpr\n", - "A neural-api/experiments/visualCifar10NTL/visualCifar10learningMT2.lpi\n", - "A neural-api/libs/backup\n", - "A neural-api/opencl\n", - "A neural-api/opencl/dot-product-test\n", - "A neural-api/experiments/visualCifar10NTL/visualCifar10learningMT2.res\n", - "A neural-api/LICENSE-EXCEPTION.LGPL\n", - "A neural-api/LICENSE\n", - "A neural-api/readme.txt\n", - "A neural-api/experiments/NeuralWebServer/SimpleNeuralWebServer.exe\n", - "A neural-api/experiments/NeuralWebServer/SimpleNeuralWebServer.res\n", - "A 
neural-api/experiments/NineMensMorris/MOINHO.lpi\n", - "A neural-api/experiments/NineMensMorris/UAUX.lfm\n", - "A neural-api/experiments/NineMensMorris/UVence.lfm\n", - "A neural-api/opencl/easy-trillion-test\n", - "A neural-api/libs/uconvolutionneuralnetwork.pas\n", - "A neural-api/libs/neuralnetwork.inc\n", - "A neural-api/libs/ueasyopencl.pas\n", - "A neural-api/experiments/NineMensMorris/princ.lfm\n", - "A neural-api/experiments/SOM-NeuralNetwork/PSOM1.res\n", - "A neural-api/experiments/mining/PMinera.ico\n", - "A neural-api/experiments/mining/UFRob1.lfm\n", - "A neural-api/experiments/mining/URobMin.pas\n", - "A neural-api/experiments/supersimplecorrelation/supersimplecorrelation.lpr\n", - "A neural-api/experiments/visualCifar10MT/visualCifar10learningMT.lpi\n", - "A neural-api/experiments/visualCifar10MT/visualCifar10learningMT.lpr\n", - "A neural-api/opencl/trillion-test\n", - "A neural-api/libs/uvolume.pas\n", - "A neural-api/experiments/visualCifar10NTL/visualCifar10learningMT2.ico\n", - "A neural-api/thirdpartylibs\n", - "A neural-api/thirdpartylibs/synapse\n", - "A neural-api/docs/cai.png\n", - "A neural-api/libs/readme.txt\n", - "A neural-api/libs/not_in_use_kernels.cl\n", - "A neural-api/libs/uab.pas\n", - "A neural-api/libs/neuralasm.inc\n", - "A neural-api/bin\n", - "A neural-api/bin/x86_64-win64\n", - "A neural-api/bin/x86_64-win64/bin\n", - "A neural-api/libs/ucifar10lcl.pas\n", - "A neural-api/libs/ubit.pas\n", - "A neural-api/libs/uabfun.pas\n", - "A neural-api/libs/udum.pas\n", - "A neural-api/libs/ucifar10.pas\n", - "A neural-api/libs/cai_dot_product.cl\n", - "A neural-api/libs/uplanbuilder.pas\n", - "A neural-api/libs/uvolumelcl.pas\n", - "A neural-api/libs/ubyteprediction.pas\n", - "A neural-api/libs/uevolutionary.pas\n", - "A neural-api/libs/uarraycache.pas\n", - "A neural-api/libs/COPYING.txt\n", - "A neural-api/opencl/dot-product-test/dot_product_test.lpr\n", - "A neural-api/libs/ugeneric.pas\n", - "A neural-api/libs/ueasyopenclcl.pas\n", 
- "A neural-api/libs/ubackpropagation.pas\n", - "A neural-api/opencl/dot-product-test/dot_product_test.res\n", - "A neural-api/opencl/dot-product-test/dot_product_test.lpi\n", - "A neural-api/opencl/dot-product-test/dot_product_test.ico\n", - "A neural-api/opencl/easy-trillion-test/easy_trillion_test.lpi\n", - "A neural-api/opencl/easy-trillion-test/ueasy_trillion_test_form.lfm\n", - "A neural-api/opencl/dot-product-test/dot_product_test_form.pas\n", - "A neural-api/opencl/easy-trillion-test/easy_trillion_test.res\n", - "A neural-api/libs/ntl.pas\n", - "A neural-api/opencl/dot-product-test/dot_product_test_form.lfm\n", - "A neural-api/opencl/easy-trillion-test/easy_trillion_test.lpr\n", - "A neural-api/opencl/easy-trillion-test/ueasy_trillion_test_form.pas\n", - "A neural-api/opencl/trillion-test/opencl_trillion_test.lpi\n", - "A neural-api/opencl/easy-trillion-test/easy_trillion_test.ico\n", - "A neural-api/opencl/trillion-test/opencl_trillion_test.lpr\n", - "A neural-api/opencl/easy-trillion-test/evolve_easy.cl\n", - "A neural-api/opencl/trillion-test/opencl_trillion_test.res\n", - "A neural-api/opencl/trillion-test/uopencl_trillion_test.pas\n", - "A neural-api/opencl/trillion-test/evolve_billion.cl\n", - "A neural-api/opencl/trillion-test/evolve_trillion.cl\n", - "A neural-api/opencl/trillion-test/README.TXT\n", - "A neural-api/opencl/trillion-test/uopencl_trillion_test.lfm\n", - "A neural-api/opencl/trillion-test/Frm_OpenCLTestMain.pas\n", - "A neural-api/opencl/trillion-test/opencl_trillion_test.ico\n", - "A neural-api/bin/readme.txt\n", - "A neural-api/thirdpartylibs/synapse/README.txt\n", - "Checked out revision 1286.\n" - ] - } - ], - "source": [ - "!svn checkout https://svn.code.sf.net/p/cai/svncode/trunk/lazarus neural-api" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CopySecondaryConfigFile /etc/lazarus/environmentoptions.xml -> 
/root/.lazarus/environmentoptions.xml\n", - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-iWTOTP\"\n", - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-va\" \"compilertest.pas\"\n", - "Hint: (lazarus) Missing state file of MultiThreadProcsLaz 1.2.1: /tf/mtprocs/lib/x86_64-linux/MultiThreadProcsLaz.compiled\n", - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-iWTOTP\" \"-Px86_64\" \"-Tlinux\"\n", - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-va\" \"-Px86_64\" \"-Tlinux\" \"compilertest.pas\"\n", - "Info: (lazarus) Execute Title=\"Compile package MultiThreadProcsLaz 1.2.1\"\n", - "Info: (lazarus) Working Directory=\"/tf/mtprocs/\"\n", - "Info: (lazarus) Executable=\"/usr/bin/fpc\"\n", - "Info: (lazarus) Param[0]=\"-B\"\n", - "Info: (lazarus) Param[1]=\"-MObjFPC\"\n", - "Info: (lazarus) Param[2]=\"-Scghi\"\n", - "Info: (lazarus) Param[3]=\"-Cg\"\n", - "Info: (lazarus) Param[4]=\"-O1\"\n", - "Info: (lazarus) Param[5]=\"-g\"\n", - "Info: (lazarus) Param[6]=\"-gl\"\n", - "Info: (lazarus) Param[7]=\"-l\"\n", - "Info: (lazarus) Param[8]=\"-vewnhibq\"\n", - "Info: (lazarus) Param[9]=\"-Fu/usr/lib/lazarus/1.8.2/packager/units/x86_64-linux\"\n", - "Info: (lazarus) Param[10]=\"-Fu/tf/mtprocs/\"\n", - "Info: (lazarus) Param[11]=\"-FU/tf/mtprocs/lib/x86_64-linux/\"\n", - "Info: (lazarus) Param[12]=\"multithreadprocslaz.pas\"\n", - "Hint: (11030) Start of reading config file /etc/fpc.cfg\n", - "Hint: (11031) End of reading config file /etc/fpc.cfg\n", - "Free Pascal Compiler version 3.0.4+dfsg-18ubuntu2 [2018/08/29] for x86_64\n", - "Copyright (c) 1993-2017 by Florian Klaempfl and others\n", - "(1002) Target OS: Linux for x86-64\n", - "(3104) Compiling multithreadprocslaz.pas\n", - "(3104) Compiling mtprocs.pas\n", - "(3104) Compiling mtpcpu.pas\n", - "(3104) Compiling mtputils.pas\n", - "/tf/mtprocs/mtputils.pas(40,43) Hint: (5024) Parameter \"Data\" not used\n", - "/tf/mtprocs/multithreadprocslaz.pas(10,10) Hint: (5023) Unit \"MTPUtils\" not used in MultiThreadProcsLaz\n", - 
"/tf/mtprocs/multithreadprocslaz.pas(10,20) Hint: (5023) Unit \"MTPCPU\" not used in MultiThreadProcsLaz\n", - "(1008) 1215 lines compiled, 0.1 sec\n", - "(1022) 5 hint(s) issued\n" - ] - } - ], - "source": [ - "!lazbuild mtprocs/multithreadprocslaz.lpk" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-iWTOTP\"\n", - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-va\" \"compilertest.pas\"\n", - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-iWTOTP\" \"-Px86_64\" \"-Tlinux\"\n", - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-va\" \"-Px86_64\" \"-Tlinux\" \"compilertest.pas\"\n", - "Info: (lazarus) Execute Title=\"Compile Project, Mode: Default, Target: /tf/neural-api/bin/x86_64-linux/bin/SimplePlantLeafDisease\"\n", - "Info: (lazarus) Working Directory=\"/tf/neural-api/examples/SimplePlantLeafDisease/\"\n", - "Info: (lazarus) Executable=\"/usr/bin/fpc\"\n", - "Info: (lazarus) Param[0]=\"-MObjFPC\"\n", - "Info: (lazarus) Param[1]=\"-Scghi\"\n", - "Info: (lazarus) Param[2]=\"-Cg\"\n", - "Info: (lazarus) Param[3]=\"-O3\"\n", - "Info: (lazarus) Param[4]=\"-l\"\n", - "Info: (lazarus) Param[5]=\"-vewnhibq\"\n", - "Info: (lazarus) Param[6]=\"-Fi/tf/neural-api/neural\"\n", - "Info: (lazarus) Param[7]=\"-Fi/tf/neural-api/bin/x86_64-linux/units\"\n", - "Info: (lazarus) Param[8]=\"-Fu/tf/neural-api/neural\"\n", - "Info: (lazarus) Param[9]=\"-Fu/usr/lib/lazarus/1.8.2/lcl/units/x86_64-linux\"\n", - "Info: (lazarus) Param[10]=\"-Fu/usr/lib/lazarus/1.8.2/components/lazutils/lib/x86_64-linux\"\n", - "Info: (lazarus) Param[11]=\"-Fu/tf/mtprocs/lib/x86_64-linux\"\n", - "Info: (lazarus) Param[12]=\"-Fu/usr/lib/lazarus/1.8.2/packager/units/x86_64-linux\"\n", - "Info: (lazarus) Param[13]=\"-Fu/tf/neural-api/examples/SimplePlantLeafDisease/\"\n", - "Info: (lazarus) Param[14]=\"-FU/tf/neural-api/bin/x86_64-linux/units/\"\n", - "Info: (lazarus) 
Param[15]=\"-FE/tf/neural-api/bin/x86_64-linux/bin/\"\n", - "Info: (lazarus) Param[16]=\"-dUseCThreads\"\n", - "Info: (lazarus) Param[17]=\"-dAVX\"\n", - "Info: (lazarus) Param[18]=\"-dRelease\"\n", - "Info: (lazarus) Param[19]=\"SimplePlantLeafDisease.pas\"\n", - "Hint: (11030) Start of reading config file /etc/fpc.cfg\n", - "Compiling Release Version\n", - "Hint: (11031) End of reading config file /etc/fpc.cfg\n", - "Free Pascal Compiler version 3.0.4+dfsg-18ubuntu2 [2018/08/29] for x86_64\n", - "/usr/bin/ld.bfd: warning: /tf/neural-api/bin/x86_64-linux/bin/link.res contains output sections; did you forget -T?\n", - "Copyright (c) 1993-2017 by Florian Klaempfl and others\n", - "(1002) Target OS: Linux for x86-64\n", - "(3104) Compiling SimplePlantLeafDisease.pas\n", - "/tf/neural-api/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.pas(14,60) Hint: (5023) Unit \"math\" not used in SimplePlantLeafDisease\n", - "(9015) Linking /tf/neural-api/bin/x86_64-linux/bin/SimplePlantLeafDisease\n", - "(1008) 95 lines compiled, 0.5 sec\n", - "(1022) 3 hint(s) issued\n" - ] - } - ], - "source": [ - "!lazbuild neural-api/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.lpi" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Using TensorFlow backend.\n" - ] - } - ], - "source": [ - "import cai.layers\n", - "import cai.datasets\n", - "import cai.models" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Downloading: http://cs231n.stanford.edu/tiny-imagenet-200.zip to tiny-imagenet-200.zip\n", - "Decompressing into: tiny-imagenet-200\n" - ] - } - ], - "source": [ - "if (has_tiny_imagenet_200):\n", - " url_zip_file=\"http://cs231n.stanford.edu/tiny-imagenet-200.zip\"\n", - " local_zip_file=\"tiny-imagenet-200.zip\"\n", - " 
expected_folder_name=\"download-tiny-imagenet-200\"\n", - " Verbose=True\n", - " cai.datasets.download_zip_and_extract(\n", - " url_zip_file=url_zip_file, local_zip_file=local_zip_file, \n", - " expected_folder_name=expected_folder_name, Verbose=Verbose)\n", - " if os.path.isdir('download-tiny-imagenet-200/tiny-imagenet-200'):\n", - " !mv download-tiny-imagenet-200/tiny-imagenet-200 tiny-imagenet-200" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [], - "source": [ - "if (has_plant_leaf_disease):\n", - " url_zip_file=\"https://data.mendeley.com/datasets/tywbtsjrjv/1/files/d5652a28-c1d8-4b76-97f3-72fb80f94efc/Plant_leaf_diseases_dataset_without_augmentation.zip?dl=1\"\n", - " local_zip_file=\"plant_leaf.zip\"\n", - " expected_folder_name=\"plant_leaf\"\n", - " Verbose=True\n", - " cai.datasets.download_zip_and_extract(\n", - " url_zip_file=url_zip_file, local_zip_file=local_zip_file, \n", - " expected_folder_name=expected_folder_name, Verbose=Verbose)\n", - " if os.path.isdir('plant_leaf/Plant_leave_diseases_dataset_without_augmentation'):\n", - " !mv plant_leaf/Plant_leave_diseases_dataset_without_augmentation plant" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "RUNNING: SimplePlantLeafDisease\n", - "Creating Neural Network...\n", - " Layers: 14\n", - " Neurons:424\n", - " Weights:251522 Sum: 1.066439\n", - "Layer 0 Neurons: 0 Weights: 0 TNNetInput(128,128,3,0,0) Output:128,128,3 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Branches:1\n", - "Layer 1 Neurons: 64 Weights: 4800 TNNetConvolutionLinear(64,5,4,2,0) Output:66,66,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 9.3083 Parent:0 Branches:1\n", - "Layer 2 Neurons: 0 Weights: 0 TNNetMaxPool(2,2,0,0,0) Output:33,33,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Parent:1 Branches:1\n", - "Layer 3 Neurons: 1 Weights: 2 
TNNetMovingStdNormalization(0,0,0,0,0) Output:33,33,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 1.0000 Parent:2 Branches:1\n", - "Layer 4 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:33,33,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum:-16.7340 Parent:3 Branches:1\n", - "Layer 5 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:33,33,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: -2.0621 Parent:4 Branches:1\n", - "Layer 6 Neurons: 0 Weights: 0 TNNetMaxPool(2,2,0,0,0) Output:17,17,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Parent:5 Branches:1\n", - "Layer 7 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:17,17,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: -3.9453 Parent:6 Branches:1\n", - "Layer 8 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:17,17,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 3.3115 Parent:7 Branches:1\n", - "Layer 9 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,2,0) Output:9,9,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 2.7569 Parent:8 Branches:1\n", - "Layer 10 Neurons: 0 Weights: 0 TNNetDropout(2,1,0,0,0) Output:9,9,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Parent:9 Branches:1\n", - "Layer 11 Neurons: 0 Weights: 0 TNNetMaxPool(2,2,0,0,0) Output:5,5,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Parent:10 Branches:1\n", - "Layer 12 Neurons: 39 Weights: 62400 TNNetFullConnectLinear(39,1,1,0,0) Output:39,1,1 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 7.4312 Parent:11 Branches:1\n", - "Layer 13 Neurons: 0 Weights: 0 TNNetSoftMax(0,0,0,0,0) Output:39,1,1 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Parent:12 Branches:0\n", - "Loading 100% of the Plant leave disease dataset into memory.\n", - "Training Images:49904 Validation Images:2775 Test Images:2775\n", - "File name is: SimplePlantLeafDisease\n", - "Learning rate:0.001000 L2 decay:0.000010 Inertia:0.900000 Batch size:64 Step size:64 Staircase 
ephocs:10\n", - "Training images:49904\n", - "Validation images:2775\n", - "Test images:2775\n", - "Computing...\n", - "640 Examples seen. Accuracy:0.0329 Error: 1.93573 Loss:3.52334 Threads: 8 Forward time: 4.93s Backward time: 4.51s Step time: 4.20s\n", - "1280 Examples seen. Accuracy:0.0402 Error: 1.92410 Loss:3.40404 Threads: 8 Forward time: 5.06s Backward time: 4.55s Step time: 4.41s\n", - "1920 Examples seen. Accuracy:0.0502 Error: 1.90493 Loss:3.37106 Threads: 8 Forward time: 4.96s Backward time: 4.62s Step time: 4.76s\n", - "2560 Examples seen. Accuracy:0.0650 Error: 1.79781 Loss:3.00073 Threads: 8 Forward time: 4.89s Backward time: 4.57s Step time: 4.46s\n", - "3200 Examples seen. Accuracy:0.0769 Error: 1.81143 Loss:3.05211 Threads: 8 Forward time: 5.20s Backward time: 4.65s Step time: 4.43s\n", - "3840 Examples seen. Accuracy:0.0909 Error: 1.82340 Loss:2.95677 Threads: 8 Forward time: 5.02s Backward time: 4.55s Step time: 4.44s\n", - "4480 Examples seen. Accuracy:0.1032 Error: 1.73382 Loss:2.70320 Threads: 8 Forward time: 5.02s Backward time: 4.50s Step time: 4.45s\n", - "5120 Examples seen. Accuracy:0.1184 Error: 1.74171 Loss:2.65646 Threads: 8 Forward time: 5.09s Backward time: 4.61s Step time: 4.43s\n", - "5760 Examples seen. Accuracy:0.1323 Error: 1.61957 Loss:2.76495 Threads: 8 Forward time: 5.06s Backward time: 4.54s Step time: 4.47s\n", - "6400 Examples seen. Accuracy:0.1503 Error: 1.41402 Loss:2.30839 Threads: 8 Forward time: 4.99s Backward time: 4.49s Step time: 4.43s\n", - "7040 Examples seen. Accuracy:0.1664 Error: 1.63357 Loss:2.37070 Threads: 8 Forward time: 5.08s Backward time: 4.62s Step time: 4.50s\n", - "7680 Examples seen. Accuracy:0.1841 Error: 1.53345 Loss:2.28641 Threads: 8 Forward time: 4.99s Backward time: 4.60s Step time: 4.49s\n", - "8320 Examples seen. Accuracy:0.2068 Error: 1.42229 Loss:1.94608 Threads: 8 Forward time: 5.11s Backward time: 4.65s Step time: 4.46s\n", - "8960 Examples seen. 
Accuracy:0.2221 Error: 1.48743 Loss:2.20207 Threads: 8 Forward time: 5.25s Backward time: 4.77s Step time: 4.46s\n", - "9600 Examples seen. Accuracy:0.2384 Error: 1.44126 Loss:2.10981 Threads: 8 Forward time: 4.90s Backward time: 4.53s Step time: 4.33s\n", - "10240 Examples seen. Accuracy:0.2583 Error: 1.36298 Loss:1.69164 Threads: 8 Forward time: 4.92s Backward time: 4.49s Step time: 4.37s\n", - "10880 Examples seen. Accuracy:0.2738 Error: 1.53760 Loss:2.12548 Threads: 8 Forward time: 5.30s Backward time: 4.63s Step time: 4.36s\n", - "11520 Examples seen. Accuracy:0.2911 Error: 1.45308 Loss:2.05101 Threads: 8 Forward time: 4.83s Backward time: 4.39s Step time: 4.31s\n", - "12160 Examples seen. Accuracy:0.3092 Error: 1.31594 Loss:1.87571 Threads: 8 Forward time: 4.90s Backward time: 4.46s Step time: 4.32s\n", - "12800 Examples seen. Accuracy:0.3235 Error: 1.25067 Loss:1.94517 Threads: 8 Forward time: 4.92s Backward time: 4.40s Step time: 4.24s\n", - "13440 Examples seen. Accuracy:0.3400 Error: 1.27565 Loss:1.63616 Threads: 8 Forward time: 4.97s Backward time: 4.34s Step time: 4.26s\n", - "14080 Examples seen. Accuracy:0.3532 Error: 1.40328 Loss:2.05918 Threads: 8 Forward time: 4.95s Backward time: 4.35s Step time: 4.34s\n", - "14720 Examples seen. Accuracy:0.3659 Error: 1.44345 Loss:2.47078 Threads: 8 Forward time: 5.17s Backward time: 4.60s Step time: 4.31s\n", - "15360 Examples seen. Accuracy:0.3776 Error: 1.12124 Loss:1.53043 Threads: 8 Forward time: 5.07s Backward time: 4.36s Step time: 4.27s\n", - "16000 Examples seen. Accuracy:0.3907 Error: 1.24098 Loss:1.93580 Threads: 8 Forward time: 5.10s Backward time: 4.49s Step time: 4.25s\n", - "16640 Examples seen. Accuracy:0.4039 Error: 1.15337 Loss:1.69533 Threads: 8 Forward time: 5.27s Backward time: 4.52s Step time: 4.24s\n", - "17280 Examples seen. Accuracy:0.4182 Error: 1.20605 Loss:1.58746 Threads: 8 Forward time: 4.83s Backward time: 4.31s Step time: 4.22s\n", - "17920 Examples seen. 
Accuracy:0.4258 Error: 1.19611 Loss:1.67349 Threads: 8 Forward time: 4.92s Backward time: 4.35s Step time: 4.26s\n", - "18560 Examples seen. Accuracy:0.4384 Error: 1.17074 Loss:1.46777 Threads: 8 Forward time: 4.87s Backward time: 4.36s Step time: 4.23s\n", - "19200 Examples seen. Accuracy:0.4506 Error: 1.23526 Loss:1.92160 Threads: 8 Forward time: 5.06s Backward time: 4.38s Step time: 4.27s\n", - "19840 Examples seen. Accuracy:0.4551 Error: 1.22965 Loss:1.48649 Threads: 8 Forward time: 5.01s Backward time: 4.38s Step time: 4.32s\n", - "20480 Examples seen. Accuracy:0.4627 Error: 1.16185 Loss:1.66194 Threads: 8 Forward time: 4.88s Backward time: 4.36s Step time: 4.29s\n", - "21120 Examples seen. Accuracy:0.4658 Error: 1.23110 Loss:1.74371 Threads: 8 Forward time: 4.97s Backward time: 4.40s Step time: 4.28s\n", - "21760 Examples seen. Accuracy:0.4719 Error: 1.24056 Loss:1.99473 Threads: 8 Forward time: 4.95s Backward time: 4.33s Step time: 4.28s\n", - "22400 Examples seen. Accuracy:0.4806 Error: 0.96615 Loss:1.39614 Threads: 8 Forward time: 5.07s Backward time: 4.40s Step time: 4.25s\n", - "23040 Examples seen. Accuracy:0.4913 Error: 1.12965 Loss:1.54893 Threads: 8 Forward time: 5.08s Backward time: 4.36s Step time: 4.37s\n", - "23680 Examples seen. Accuracy:0.5023 Error: 1.11976 Loss:1.58815 Threads: 8 Forward time: 5.00s Backward time: 4.35s Step time: 4.30s\n", - "24320 Examples seen. Accuracy:0.5051 Error: 1.11871 Loss:1.45467 Threads: 8 Forward time: 5.22s Backward time: 4.59s Step time: 4.33s\n", - "24960 Examples seen. Accuracy:0.5128 Error: 1.01548 Loss:1.38496 Threads: 8 Forward time: 4.99s Backward time: 4.36s Step time: 4.34s\n", - "25600 Examples seen. Accuracy:0.5208 Error: 0.99291 Loss:1.30965 Threads: 8 Forward time: 5.02s Backward time: 4.41s Step time: 4.34s\n", - "26240 Examples seen. 
Accuracy:0.5313 Error: 0.99596 Loss:1.26464 Threads: 8 Forward time: 4.90s Backward time: 4.40s Step time: 4.74s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "26880 Examples seen. Accuracy:0.5353 Error: 1.00507 Loss:1.27191 Threads: 8 Forward time: 5.30s Backward time: 4.64s Step time: 4.42s\n", - "27520 Examples seen. Accuracy:0.5427 Error: 1.00052 Loss:1.25032 Threads: 8 Forward time: 4.95s Backward time: 4.36s Step time: 4.24s\n", - "28160 Examples seen. Accuracy:0.5459 Error: 1.08495 Loss:1.37669 Threads: 8 Forward time: 4.93s Backward time: 4.30s Step time: 4.33s\n", - "28800 Examples seen. Accuracy:0.5517 Error: 1.01282 Loss:1.20652 Threads: 8 Forward time: 4.98s Backward time: 4.29s Step time: 4.29s\n", - "29440 Examples seen. Accuracy:0.5557 Error: 1.01782 Loss:1.64236 Threads: 8 Forward time: 4.97s Backward time: 4.28s Step time: 4.27s\n", - "30080 Examples seen. Accuracy:0.5602 Error: 1.01594 Loss:1.34733 Threads: 8 Forward time: 5.09s Backward time: 4.30s Step time: 4.47s\n", - "30720 Examples seen. Accuracy:0.5625 Error: 0.96419 Loss:1.50042 Threads: 8 Forward time: 4.91s Backward time: 4.23s Step time: 4.23s\n", - "31360 Examples seen. Accuracy:0.5655 Error: 0.97419 Loss:1.09295 Threads: 8 Forward time: 4.95s Backward time: 4.25s Step time: 4.23s\n", - "32000 Examples seen. Accuracy:0.5690 Error: 1.03017 Loss:1.38354 Threads: 8 Forward time: 4.87s Backward time: 4.25s Step time: 4.21s\n", - "32640 Examples seen. Accuracy:0.5699 Error: 1.10302 Loss:1.21681 Threads: 8 Forward time: 4.93s Backward time: 4.25s Step time: 4.19s\n", - "33280 Examples seen. Accuracy:0.5746 Error: 0.91844 Loss:1.02189 Threads: 8 Forward time: 4.93s Backward time: 4.29s Step time: 4.23s\n", - "33920 Examples seen. Accuracy:0.5749 Error: 1.07339 Loss:1.34992 Threads: 8 Forward time: 4.99s Backward time: 4.31s Step time: 4.23s\n", - "34560 Examples seen. 
Accuracy:0.5769 Error: 0.83645 Loss:0.93622 Threads: 8 Forward time: 4.94s Backward time: 4.24s Step time: 4.21s\n", - "35200 Examples seen. Accuracy:0.5834 Error: 0.90330 Loss:1.05480 Threads: 8 Forward time: 4.98s Backward time: 4.23s Step time: 4.23s\n", - "35840 Examples seen. Accuracy:0.5869 Error: 0.93318 Loss:1.45615 Threads: 8 Forward time: 4.97s Backward time: 4.18s Step time: 4.08s\n", - "36480 Examples seen. Accuracy:0.5874 Error: 0.91055 Loss:1.14300 Threads: 8 Forward time: 5.00s Backward time: 4.19s Step time: 4.12s\n", - "37120 Examples seen. Accuracy:0.5877 Error: 0.88337 Loss:1.03457 Threads: 8 Forward time: 4.95s Backward time: 4.19s Step time: 4.12s\n", - "37760 Examples seen. Accuracy:0.5890 Error: 0.88038 Loss:1.11912 Threads: 8 Forward time: 4.95s Backward time: 4.18s Step time: 4.11s\n", - "38400 Examples seen. Accuracy:0.5929 Error: 0.88309 Loss:1.03070 Threads: 8 Forward time: 4.85s Backward time: 4.17s Step time: 4.17s\n", - "39040 Examples seen. Accuracy:0.6014 Error: 0.81315 Loss:0.87636 Threads: 8 Forward time: 4.90s Backward time: 4.18s Step time: 4.13s\n", - "39680 Examples seen. Accuracy:0.6080 Error: 0.97946 Loss:1.21024 Threads: 8 Forward time: 4.88s Backward time: 4.26s Step time: 4.16s\n", - "40320 Examples seen. Accuracy:0.6141 Error: 0.68760 Loss:1.04425 Threads: 8 Forward time: 4.86s Backward time: 4.19s Step time: 4.23s\n", - "40960 Examples seen. Accuracy:0.6174 Error: 1.00662 Loss:1.25669 Threads: 8 Forward time: 4.91s Backward time: 4.23s Step time: 4.13s\n", - "41600 Examples seen. Accuracy:0.6202 Error: 0.92953 Loss:1.22395 Threads: 8 Forward time: 4.84s Backward time: 4.19s Step time: 4.11s\n", - "42240 Examples seen. Accuracy:0.6241 Error: 1.03482 Loss:1.35462 Threads: 8 Forward time: 4.92s Backward time: 4.19s Step time: 4.12s\n", - "42880 Examples seen. Accuracy:0.6260 Error: 0.93275 Loss:1.05832 Threads: 8 Forward time: 4.82s Backward time: 4.14s Step time: 4.09s\n", - "43520 Examples seen. 
Accuracy:0.6306 Error: 0.85488 Loss:1.03400 Threads: 8 Forward time: 4.94s Backward time: 4.26s Step time: 4.14s\n", - "44160 Examples seen. Accuracy:0.6354 Error: 0.89946 Loss:1.16312 Threads: 8 Forward time: 4.91s Backward time: 4.21s Step time: 4.14s\n", - "44800 Examples seen. Accuracy:0.6343 Error: 1.02987 Loss:1.36401 Threads: 8 Forward time: 4.91s Backward time: 4.16s Step time: 4.16s\n", - "45440 Examples seen. Accuracy:0.6371 Error: 0.87058 Loss:0.98873 Threads: 8 Forward time: 4.91s Backward time: 4.15s Step time: 4.19s\n", - "46080 Examples seen. Accuracy:0.6375 Error: 1.08666 Loss:1.32725 Threads: 8 Forward time: 4.90s Backward time: 4.23s Step time: 4.12s\n", - "46720 Examples seen. Accuracy:0.6396 Error: 0.96513 Loss:1.08828 Threads: 8 Forward time: 4.89s Backward time: 4.16s Step time: 4.13s\n", - "47360 Examples seen. Accuracy:0.6416 Error: 0.97114 Loss:1.29618 Threads: 8 Forward time: 4.87s Backward time: 4.16s Step time: 4.14s\n", - "48000 Examples seen. Accuracy:0.6439 Error: 0.87264 Loss:1.09346 Threads: 8 Forward time: 5.05s Backward time: 4.17s Step time: 4.12s\n", - "48640 Examples seen. Accuracy:0.6484 Error: 0.94305 Loss:1.14566 Threads: 8 Forward time: 4.94s Backward time: 4.22s Step time: 4.16s\n", - "49280 Examples seen. Accuracy:0.6503 Error: 0.94371 Loss:1.15029 Threads: 8 Forward time: 4.93s Backward time: 4.11s Step time: 4.22s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 1 Examples seen:49904 Validation Accuracy: 0.7630 Validation Error: 0.6617 Validation Loss: 0.7862 Total time: 6.10min\n", - "Epoch time: 5.5 minutes. 100 epochs: 9.1 hours.\n", - "Epochs: 1. Working time: 0.1 hours.\n", - "50544 Examples seen. Accuracy:0.6560 Error: 0.75665 Loss:1.07503 Threads: 8 Forward time: 4.83s Backward time: 4.18s Step time: 4.18s\n", - "51184 Examples seen. 
Accuracy:0.6633 Error: 0.71766 Loss:0.91668 Threads: 8 Forward time: 4.96s Backward time: 4.19s Step time: 4.15s\n", - "51824 Examples seen. Accuracy:0.6619 Error: 0.98168 Loss:1.31640 Threads: 8 Forward time: 4.88s Backward time: 4.13s Step time: 4.14s\n", - "52464 Examples seen. Accuracy:0.6665 Error: 0.69870 Loss:0.84428 Threads: 8 Forward time: 4.91s Backward time: 4.15s Step time: 4.20s\n", - "53104 Examples seen. Accuracy:0.6701 Error: 0.73452 Loss:1.04197 Threads: 8 Forward time: 4.92s Backward time: 4.22s Step time: 4.20s\n", - "53744 Examples seen. Accuracy:0.6740 Error: 0.73798 Loss:0.91838 Threads: 8 Forward time: 4.91s Backward time: 4.08s Step time: 4.23s\n", - "54384 Examples seen. Accuracy:0.6785 Error: 0.80641 Loss:0.95520 Threads: 8 Forward time: 5.00s Backward time: 4.16s Step time: 4.28s\n", - "55024 Examples seen. Accuracy:0.6825 Error: 0.67690 Loss:0.79173 Threads: 8 Forward time: 4.90s Backward time: 4.11s Step time: 4.11s\n", - "55664 Examples seen. Accuracy:0.6826 Error: 0.73889 Loss:1.13018 Threads: 8 Forward time: 4.90s Backward time: 4.18s Step time: 4.08s\n", - "56304 Examples seen. Accuracy:0.6812 Error: 0.95271 Loss:1.17909 Threads: 8 Forward time: 4.96s Backward time: 4.19s Step time: 4.15s\n", - "56944 Examples seen. Accuracy:0.6839 Error: 0.78306 Loss:1.23661 Threads: 8 Forward time: 5.08s Backward time: 4.19s Step time: 4.19s\n", - "57584 Examples seen. Accuracy:0.6876 Error: 0.62120 Loss:0.71262 Threads: 8 Forward time: 5.48s Backward time: 4.70s Step time: 4.16s\n", - "58224 Examples seen. Accuracy:0.6914 Error: 0.63904 Loss:0.67689 Threads: 8 Forward time: 5.16s Backward time: 4.27s Step time: 4.11s\n", - "58864 Examples seen. Accuracy:0.6939 Error: 0.83921 Loss:1.03057 Threads: 8 Forward time: 4.87s Backward time: 4.11s Step time: 4.15s\n", - "59504 Examples seen. Accuracy:0.6940 Error: 0.95884 Loss:1.17702 Threads: 8 Forward time: 4.99s Backward time: 4.15s Step time: 4.20s\n", - "60144 Examples seen. 
Accuracy:0.6965 Error: 0.69701 Loss:0.81238 Threads: 8 Forward time: 5.01s Backward time: 4.24s Step time: 4.27s\n", - "60784 Examples seen. Accuracy:0.6980 Error: 0.75253 Loss:1.25958 Threads: 8 Forward time: 5.18s Backward time: 4.37s Step time: 4.10s\n", - "61424 Examples seen. Accuracy:0.6960 Error: 0.98763 Loss:1.45467 Threads: 8 Forward time: 4.91s Backward time: 4.10s Step time: 4.83s\n", - "62064 Examples seen. Accuracy:0.6966 Error: 0.76255 Loss:0.83836 Threads: 8 Forward time: 4.93s Backward time: 4.05s Step time: 4.03s\n", - "62704 Examples seen. Accuracy:0.6959 Error: 0.89303 Loss:1.09438 Threads: 8 Forward time: 4.95s Backward time: 4.10s Step time: 4.09s\n", - "63344 Examples seen. Accuracy:0.6963 Error: 0.77112 Loss:0.97534 Threads: 8 Forward time: 4.92s Backward time: 4.13s Step time: 4.09s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "63984 Examples seen. Accuracy:0.7006 Error: 0.61098 Loss:0.79566 Threads: 8 Forward time: 4.93s Backward time: 4.08s Step time: 4.11s\n", - "64624 Examples seen. Accuracy:0.7029 Error: 0.65674 Loss:0.68639 Threads: 8 Forward time: 4.96s Backward time: 4.09s Step time: 4.09s\n", - "65264 Examples seen. Accuracy:0.7006 Error: 0.92591 Loss:1.08386 Threads: 8 Forward time: 5.04s Backward time: 4.11s Step time: 4.27s\n", - "65904 Examples seen. Accuracy:0.7002 Error: 0.88518 Loss:1.12173 Threads: 8 Forward time: 4.96s Backward time: 4.10s Step time: 4.18s\n", - "66544 Examples seen. Accuracy:0.7054 Error: 0.73318 Loss:0.93317 Threads: 8 Forward time: 4.93s Backward time: 4.10s Step time: 4.17s\n", - "67184 Examples seen. Accuracy:0.7081 Error: 0.73602 Loss:0.96279 Threads: 8 Forward time: 4.90s Backward time: 4.14s Step time: 4.19s\n", - "67824 Examples seen. Accuracy:0.7077 Error: 0.82680 Loss:0.98536 Threads: 8 Forward time: 5.00s Backward time: 4.10s Step time: 4.16s\n", - "68464 Examples seen. 
Accuracy:0.7112 Error: 0.67516 Loss:0.70925 Threads: 8 Forward time: 5.02s Backward time: 4.09s Step time: 4.08s\n", - "69104 Examples seen. Accuracy:0.7110 Error: 0.68307 Loss:0.71865 Threads: 8 Forward time: 4.95s Backward time: 4.05s Step time: 4.09s\n", - "69744 Examples seen. Accuracy:0.7104 Error: 0.76397 Loss:1.00817 Threads: 8 Forward time: 4.96s Backward time: 4.09s Step time: 4.16s\n", - "70384 Examples seen. Accuracy:0.7116 Error: 0.73266 Loss:0.90177 Threads: 8 Forward time: 4.94s Backward time: 4.06s Step time: 4.19s\n", - "71024 Examples seen. Accuracy:0.7127 Error: 0.74932 Loss:0.87967 Threads: 8 Forward time: 4.94s Backward time: 4.08s Step time: 4.11s\n", - "71664 Examples seen. Accuracy:0.7125 Error: 0.73009 Loss:0.87349 Threads: 8 Forward time: 5.00s Backward time: 4.07s Step time: 4.10s\n", - "72304 Examples seen. Accuracy:0.7147 Error: 0.82438 Loss:0.99783 Threads: 8 Forward time: 4.86s Backward time: 4.02s Step time: 4.51s\n", - "72944 Examples seen. Accuracy:0.7186 Error: 0.65873 Loss:0.81989 Threads: 8 Forward time: 4.83s Backward time: 3.98s Step time: 4.01s\n", - "73584 Examples seen. Accuracy:0.7232 Error: 0.70890 Loss:0.68759 Threads: 8 Forward time: 5.04s Backward time: 4.07s Step time: 4.11s\n", - "74224 Examples seen. Accuracy:0.7237 Error: 0.67131 Loss:0.73497 Threads: 8 Forward time: 4.82s Backward time: 4.02s Step time: 4.09s\n", - "74864 Examples seen. Accuracy:0.7241 Error: 0.83505 Loss:1.08028 Threads: 8 Forward time: 4.83s Backward time: 4.00s Step time: 4.02s\n", - "75504 Examples seen. Accuracy:0.7267 Error: 0.71332 Loss:0.80013 Threads: 8 Forward time: 4.89s Backward time: 4.01s Step time: 4.07s\n", - "76144 Examples seen. Accuracy:0.7273 Error: 0.67231 Loss:0.88565 Threads: 8 Forward time: 4.91s Backward time: 4.03s Step time: 4.10s\n", - "76784 Examples seen. Accuracy:0.7307 Error: 0.74971 Loss:0.95142 Threads: 8 Forward time: 5.02s Backward time: 4.02s Step time: 4.04s\n", - "77424 Examples seen. 
Accuracy:0.7327 Error: 0.59625 Loss:0.59369 Threads: 8 Forward time: 4.89s Backward time: 4.04s Step time: 3.99s\n", - "78064 Examples seen. Accuracy:0.7351 Error: 0.65493 Loss:0.62656 Threads: 8 Forward time: 4.94s Backward time: 4.04s Step time: 4.05s\n", - "78704 Examples seen. Accuracy:0.7381 Error: 0.60676 Loss:0.66600 Threads: 8 Forward time: 4.92s Backward time: 4.08s Step time: 4.16s\n", - "79344 Examples seen. Accuracy:0.7401 Error: 0.62631 Loss:0.77228 Threads: 8 Forward time: 4.95s Backward time: 4.02s Step time: 4.04s\n", - "79984 Examples seen. Accuracy:0.7426 Error: 0.61855 Loss:0.67665 Threads: 8 Forward time: 5.03s Backward time: 4.08s Step time: 4.01s\n", - "80624 Examples seen. Accuracy:0.7413 Error: 0.75523 Loss:0.81710 Threads: 8 Forward time: 4.98s Backward time: 4.06s Step time: 4.04s\n", - "81264 Examples seen. Accuracy:0.7437 Error: 0.65251 Loss:0.87765 Threads: 8 Forward time: 4.96s Backward time: 4.02s Step time: 4.20s\n", - "81904 Examples seen. Accuracy:0.7470 Error: 0.73352 Loss:0.74941 Threads: 8 Forward time: 4.89s Backward time: 4.02s Step time: 4.12s\n", - "82544 Examples seen. Accuracy:0.7486 Error: 0.51316 Loss:0.66584 Threads: 8 Forward time: 4.86s Backward time: 3.98s Step time: 3.99s\n", - "83184 Examples seen. Accuracy:0.7531 Error: 0.52528 Loss:0.55559 Threads: 8 Forward time: 4.98s Backward time: 4.05s Step time: 4.88s\n", - "83824 Examples seen. Accuracy:0.7534 Error: 0.58062 Loss:0.59756 Threads: 8 Forward time: 4.84s Backward time: 4.00s Step time: 4.05s\n", - "84464 Examples seen. Accuracy:0.7512 Error: 0.70739 Loss:1.04535 Threads: 8 Forward time: 4.87s Backward time: 3.98s Step time: 4.00s\n", - "85104 Examples seen. Accuracy:0.7514 Error: 0.71399 Loss:0.82362 Threads: 8 Forward time: 4.91s Backward time: 3.94s Step time: 4.08s\n", - "85744 Examples seen. Accuracy:0.7526 Error: 0.70287 Loss:0.81352 Threads: 8 Forward time: 4.86s Backward time: 3.96s Step time: 4.05s\n", - "86384 Examples seen. 
Accuracy:0.7515 Error: 0.54848 Loss:0.53062 Threads: 8 Forward time: 4.84s Backward time: 4.00s Step time: 4.03s\n", - "87024 Examples seen. Accuracy:0.7491 Error: 0.86166 Loss:1.14836 Threads: 8 Forward time: 4.82s Backward time: 3.99s Step time: 4.10s\n", - "87664 Examples seen. Accuracy:0.7463 Error: 0.62805 Loss:0.71955 Threads: 8 Forward time: 4.79s Backward time: 3.92s Step time: 4.02s\n", - "88304 Examples seen. Accuracy:0.7449 Error: 0.74902 Loss:0.88322 Threads: 8 Forward time: 4.82s Backward time: 3.98s Step time: 4.06s\n", - "88944 Examples seen. Accuracy:0.7449 Error: 0.55448 Loss:0.55602 Threads: 8 Forward time: 4.93s Backward time: 3.99s Step time: 4.05s\n", - "89584 Examples seen. Accuracy:0.7445 Error: 0.62960 Loss:0.63979 Threads: 8 Forward time: 4.85s Backward time: 3.97s Step time: 4.01s\n", - "90224 Examples seen. Accuracy:0.7453 Error: 0.61505 Loss:0.71534 Threads: 8 Forward time: 5.05s Backward time: 3.99s Step time: 4.29s\n", - "90864 Examples seen. Accuracy:0.7457 Error: 0.71500 Loss:0.84404 Threads: 8 Forward time: 5.09s Backward time: 4.00s Step time: 4.14s\n", - "91504 Examples seen. Accuracy:0.7477 Error: 0.67190 Loss:0.85606 Threads: 8 Forward time: 5.11s Backward time: 4.02s Step time: 4.09s\n", - "92144 Examples seen. Accuracy:0.7488 Error: 0.52931 Loss:0.51678 Threads: 8 Forward time: 5.07s Backward time: 3.99s Step time: 4.05s\n", - "92784 Examples seen. Accuracy:0.7503 Error: 0.58245 Loss:0.64336 Threads: 8 Forward time: 5.06s Backward time: 3.99s Step time: 4.05s\n", - "93424 Examples seen. Accuracy:0.7540 Error: 0.61066 Loss:0.64922 Threads: 8 Forward time: 5.10s Backward time: 3.96s Step time: 4.05s\n", - "94064 Examples seen. Accuracy:0.7587 Error: 0.60802 Loss:0.67975 Threads: 8 Forward time: 5.07s Backward time: 3.99s Step time: 4.05s\n", - "94704 Examples seen. Accuracy:0.7574 Error: 0.76649 Loss:0.90848 Threads: 8 Forward time: 4.99s Backward time: 3.98s Step time: 4.03s\n", - "95344 Examples seen. 
Accuracy:0.7581 Error: 0.44448 Loss:0.47948 Threads: 8 Forward time: 5.03s Backward time: 3.97s Step time: 3.97s\n", - "95984 Examples seen. Accuracy:0.7602 Error: 0.60725 Loss:0.73486 Threads: 8 Forward time: 4.99s Backward time: 3.95s Step time: 3.99s\n", - "96624 Examples seen. Accuracy:0.7635 Error: 0.46171 Loss:0.54300 Threads: 8 Forward time: 5.00s Backward time: 3.94s Step time: 3.98s\n", - "97264 Examples seen. Accuracy:0.7632 Error: 0.63462 Loss:0.86096 Threads: 8 Forward time: 4.97s Backward time: 3.95s Step time: 3.99s\n", - "97904 Examples seen. Accuracy:0.7615 Error: 0.72525 Loss:0.77724 Threads: 8 Forward time: 5.00s Backward time: 3.93s Step time: 4.04s\n", - "98544 Examples seen. Accuracy:0.7618 Error: 0.58084 Loss:0.56447 Threads: 8 Forward time: 5.03s Backward time: 3.95s Step time: 4.03s\n", - "99184 Examples seen. Accuracy:0.7626 Error: 0.47908 Loss:0.57243 Threads: 8 Forward time: 4.90s Backward time: 3.89s Step time: 4.41s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 2 Examples seen:99808 Validation Accuracy: 0.7913 Validation Error: 0.6032 Validation Loss: 0.6920 Total time: 11.94min\n", - "Epoch time: 5.7 minutes. 100 epochs: 9.6 hours.\n", - "Epochs: 2. Working time: 0.2 hours.\n", - "100448 Examples seen. Accuracy:0.7663 Error: 0.54884 Loss:0.51615 Threads: 8 Forward time: 4.92s Backward time: 3.87s Step time: 3.95s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "101088 Examples seen. Accuracy:0.7685 Error: 0.52692 Loss:0.56441 Threads: 8 Forward time: 4.96s Backward time: 3.90s Step time: 3.95s\n", - "101728 Examples seen. Accuracy:0.7693 Error: 0.64810 Loss:0.80685 Threads: 8 Forward time: 5.08s Backward time: 4.04s Step time: 4.36s\n", - "102368 Examples seen. Accuracy:0.7656 Error: 0.73363 Loss:0.84176 Threads: 8 Forward time: 4.95s Backward time: 3.93s Step time: 3.99s\n", - "103008 Examples seen. 
Accuracy:0.7651 Error: 0.54348 Loss:0.65500 Threads: 8 Forward time: 4.93s Backward time: 3.90s Step time: 3.93s\n", - "103648 Examples seen. Accuracy:0.7665 Error: 0.51321 Loss:0.50862 Threads: 8 Forward time: 4.92s Backward time: 3.90s Step time: 3.94s\n", - "104288 Examples seen. Accuracy:0.7681 Error: 0.66848 Loss:0.74828 Threads: 8 Forward time: 4.98s Backward time: 3.93s Step time: 3.97s\n", - "104928 Examples seen. Accuracy:0.7679 Error: 0.67335 Loss:0.76467 Threads: 8 Forward time: 4.91s Backward time: 3.91s Step time: 4.01s\n", - "105568 Examples seen. Accuracy:0.7652 Error: 0.70474 Loss:0.73804 Threads: 8 Forward time: 4.95s Backward time: 3.92s Step time: 3.99s\n", - "106208 Examples seen. Accuracy:0.7637 Error: 0.71777 Loss:0.84559 Threads: 8 Forward time: 5.18s Backward time: 4.09s Step time: 3.93s\n", - "106848 Examples seen. Accuracy:0.7608 Error: 0.76632 Loss:0.91273 Threads: 8 Forward time: 5.09s Backward time: 3.94s Step time: 3.98s\n", - "107488 Examples seen. Accuracy:0.7618 Error: 0.58933 Loss:0.64879 Threads: 8 Forward time: 4.97s Backward time: 3.87s Step time: 3.96s\n", - "108128 Examples seen. Accuracy:0.7678 Error: 0.54975 Loss:0.76419 Threads: 8 Forward time: 4.93s Backward time: 3.90s Step time: 4.01s\n", - "108768 Examples seen. Accuracy:0.7701 Error: 0.62359 Loss:0.68389 Threads: 8 Forward time: 4.98s Backward time: 3.89s Step time: 3.98s\n", - "109408 Examples seen. Accuracy:0.7698 Error: 0.50841 Loss:0.60319 Threads: 8 Forward time: 4.95s Backward time: 3.84s Step time: 3.93s\n", - "110048 Examples seen. Accuracy:0.7691 Error: 0.68008 Loss:0.77614 Threads: 8 Forward time: 4.93s Backward time: 3.87s Step time: 3.93s\n", - "110688 Examples seen. Accuracy:0.7697 Error: 0.59959 Loss:0.65718 Threads: 8 Forward time: 4.94s Backward time: 3.90s Step time: 3.96s\n", - "111328 Examples seen. Accuracy:0.7709 Error: 0.64883 Loss:0.79060 Threads: 8 Forward time: 4.95s Backward time: 3.88s Step time: 3.94s\n", - "111968 Examples seen. 
Accuracy:0.7724 Error: 0.68143 Loss:0.92520 Threads: 8 Forward time: 4.91s Backward time: 3.92s Step time: 3.93s\n", - "112608 Examples seen. Accuracy:0.7744 Error: 0.65300 Loss:0.76157 Threads: 8 Forward time: 4.97s Backward time: 3.87s Step time: 3.95s\n", - "113248 Examples seen. Accuracy:0.7767 Error: 0.29263 Loss:0.33656 Threads: 8 Forward time: 4.92s Backward time: 3.88s Step time: 3.97s\n", - "113888 Examples seen. Accuracy:0.7770 Error: 0.56979 Loss:0.60193 Threads: 8 Forward time: 4.97s Backward time: 3.85s Step time: 3.96s\n", - "114528 Examples seen. Accuracy:0.7797 Error: 0.51112 Loss:0.57280 Threads: 8 Forward time: 4.96s Backward time: 3.88s Step time: 3.97s\n", - "115168 Examples seen. Accuracy:0.7802 Error: 0.46003 Loss:0.53958 Threads: 8 Forward time: 4.96s Backward time: 3.89s Step time: 3.96s\n", - "115808 Examples seen. Accuracy:0.7816 Error: 0.52953 Loss:0.63115 Threads: 8 Forward time: 4.96s Backward time: 3.85s Step time: 3.94s\n", - "116448 Examples seen. Accuracy:0.7825 Error: 0.69798 Loss:0.81466 Threads: 8 Forward time: 4.98s Backward time: 3.90s Step time: 3.93s\n", - "117088 Examples seen. Accuracy:0.7848 Error: 0.57015 Loss:0.72793 Threads: 8 Forward time: 4.90s Backward time: 3.85s Step time: 3.97s\n", - "117728 Examples seen. Accuracy:0.7840 Error: 0.72595 Loss:0.92784 Threads: 8 Forward time: 4.93s Backward time: 3.85s Step time: 3.83s\n", - "118368 Examples seen. Accuracy:0.7844 Error: 0.55089 Loss:0.65124 Threads: 8 Forward time: 4.89s Backward time: 3.85s Step time: 3.84s\n", - "119008 Examples seen. Accuracy:0.7865 Error: 0.63172 Loss:0.71128 Threads: 8 Forward time: 4.88s Backward time: 3.82s Step time: 3.85s\n", - "119648 Examples seen. Accuracy:0.7869 Error: 0.58494 Loss:0.66271 Threads: 8 Forward time: 4.97s Backward time: 3.85s Step time: 3.86s\n", - "120288 Examples seen. Accuracy:0.7882 Error: 0.35686 Loss:0.35653 Threads: 8 Forward time: 4.96s Backward time: 3.86s Step time: 3.94s\n", - "120928 Examples seen. 
Accuracy:0.7877 Error: 0.62645 Loss:0.75354 Threads: 8 Forward time: 4.96s Backward time: 3.82s Step time: 3.90s\n", - "121568 Examples seen. Accuracy:0.7866 Error: 0.65171 Loss:0.66633 Threads: 8 Forward time: 4.98s Backward time: 3.86s Step time: 4.00s\n", - "122208 Examples seen. Accuracy:0.7883 Error: 0.58689 Loss:0.55934 Threads: 8 Forward time: 4.91s Backward time: 3.85s Step time: 4.33s\n", - "122848 Examples seen. Accuracy:0.7905 Error: 0.50794 Loss:0.65584 Threads: 8 Forward time: 4.90s Backward time: 3.83s Step time: 3.88s\n", - "123488 Examples seen. Accuracy:0.7889 Error: 0.66883 Loss:0.73745 Threads: 8 Forward time: 4.94s Backward time: 3.82s Step time: 3.91s\n", - "124128 Examples seen. Accuracy:0.7890 Error: 0.58316 Loss:0.66858 Threads: 8 Forward time: 4.93s Backward time: 3.81s Step time: 3.85s\n", - "124768 Examples seen. Accuracy:0.7876 Error: 0.60451 Loss:0.64783 Threads: 8 Forward time: 4.96s Backward time: 3.81s Step time: 3.91s\n", - "125408 Examples seen. Accuracy:0.7894 Error: 0.39372 Loss:0.33823 Threads: 8 Forward time: 4.92s Backward time: 3.81s Step time: 3.82s\n", - "126048 Examples seen. Accuracy:0.7903 Error: 0.58704 Loss:0.78061 Threads: 8 Forward time: 4.90s Backward time: 3.82s Step time: 3.90s\n", - "126688 Examples seen. Accuracy:0.7926 Error: 0.38584 Loss:0.52507 Threads: 8 Forward time: 4.97s Backward time: 3.81s Step time: 3.91s\n", - "127328 Examples seen. Accuracy:0.7924 Error: 0.61157 Loss:0.86701 Threads: 8 Forward time: 4.90s Backward time: 3.83s Step time: 3.87s\n", - "127968 Examples seen. Accuracy:0.7938 Error: 0.59949 Loss:0.58580 Threads: 8 Forward time: 4.91s Backward time: 3.79s Step time: 3.86s\n", - "128608 Examples seen. Accuracy:0.7942 Error: 0.54707 Loss:0.78196 Threads: 8 Forward time: 4.95s Backward time: 3.82s Step time: 3.86s\n", - "129248 Examples seen. Accuracy:0.7953 Error: 0.55767 Loss:0.65099 Threads: 8 Forward time: 4.91s Backward time: 3.81s Step time: 3.87s\n", - "129888 Examples seen. 
Accuracy:0.7952 Error: 0.51269 Loss:0.60067 Threads: 8 Forward time: 4.91s Backward time: 3.76s Step time: 3.87s\n", - "130528 Examples seen. Accuracy:0.7939 Error: 0.59289 Loss:0.70031 Threads: 8 Forward time: 4.94s Backward time: 3.81s Step time: 3.86s\n", - "131168 Examples seen. Accuracy:0.7934 Error: 0.58187 Loss:0.67381 Threads: 8 Forward time: 4.92s Backward time: 3.81s Step time: 3.87s\n", - "131808 Examples seen. Accuracy:0.7942 Error: 0.53611 Loss:0.50868 Threads: 8 Forward time: 4.90s Backward time: 3.75s Step time: 3.85s\n", - "132448 Examples seen. Accuracy:0.7953 Error: 0.54292 Loss:0.63923 Threads: 8 Forward time: 4.93s Backward time: 3.79s Step time: 3.87s\n", - "133088 Examples seen. Accuracy:0.7920 Error: 0.76131 Loss:0.91943 Threads: 8 Forward time: 4.92s Backward time: 3.77s Step time: 3.89s\n", - "133728 Examples seen. Accuracy:0.7912 Error: 0.81032 Loss:1.00075 Threads: 8 Forward time: 4.95s Backward time: 3.76s Step time: 3.91s\n", - "134368 Examples seen. Accuracy:0.7911 Error: 0.59060 Loss:0.63550 Threads: 8 Forward time: 5.03s Backward time: 3.84s Step time: 3.93s\n", - "135008 Examples seen. Accuracy:0.7904 Error: 0.56258 Loss:0.72938 Threads: 8 Forward time: 4.94s Backward time: 3.78s Step time: 3.90s\n", - "135648 Examples seen. Accuracy:0.7923 Error: 0.60513 Loss:0.79000 Threads: 8 Forward time: 4.94s Backward time: 3.77s Step time: 3.89s\n", - "136288 Examples seen. Accuracy:0.7913 Error: 0.64303 Loss:0.67180 Threads: 8 Forward time: 4.95s Backward time: 3.79s Step time: 3.91s\n", - "136928 Examples seen. Accuracy:0.7932 Error: 0.52664 Loss:0.60806 Threads: 8 Forward time: 5.03s Backward time: 3.79s Step time: 3.91s\n", - "137568 Examples seen. Accuracy:0.7932 Error: 0.44781 Loss:0.44149 Threads: 8 Forward time: 4.92s Backward time: 3.78s Step time: 3.89s\n", - "138208 Examples seen. 
Accuracy:0.7932 Error: 0.54784 Loss:0.62629 Threads: 8 Forward time: 4.95s Backward time: 3.77s Step time: 3.90s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "138848 Examples seen. Accuracy:0.7925 Error: 0.53230 Loss:0.58262 Threads: 8 Forward time: 4.91s Backward time: 3.78s Step time: 3.94s\n", - "139488 Examples seen. Accuracy:0.7935 Error: 0.44143 Loss:0.56986 Threads: 8 Forward time: 4.91s Backward time: 3.76s Step time: 3.91s\n", - "140128 Examples seen. Accuracy:0.7932 Error: 0.60223 Loss:0.62486 Threads: 8 Forward time: 4.95s Backward time: 3.77s Step time: 3.90s\n", - "140768 Examples seen. Accuracy:0.7972 Error: 0.43024 Loss:0.37385 Threads: 8 Forward time: 4.92s Backward time: 3.76s Step time: 3.92s\n", - "141408 Examples seen. Accuracy:0.7999 Error: 0.64740 Loss:0.74151 Threads: 8 Forward time: 4.91s Backward time: 3.80s Step time: 3.90s\n", - "142048 Examples seen. Accuracy:0.8018 Error: 0.46514 Loss:0.64103 Threads: 8 Forward time: 5.02s Backward time: 3.81s Step time: 3.91s\n", - "142688 Examples seen. Accuracy:0.8055 Error: 0.46350 Loss:0.49218 Threads: 8 Forward time: 5.05s Backward time: 3.81s Step time: 3.95s\n", - "143328 Examples seen. Accuracy:0.8052 Error: 0.44838 Loss:0.43140 Threads: 8 Forward time: 4.99s Backward time: 3.78s Step time: 3.93s\n", - "143968 Examples seen. Accuracy:0.8067 Error: 0.45505 Loss:0.45727 Threads: 8 Forward time: 4.95s Backward time: 3.76s Step time: 3.89s\n", - "144608 Examples seen. Accuracy:0.8067 Error: 0.60553 Loss:0.92126 Threads: 8 Forward time: 4.94s Backward time: 3.75s Step time: 3.90s\n", - "145248 Examples seen. Accuracy:0.8047 Error: 0.58275 Loss:0.61992 Threads: 8 Forward time: 4.91s Backward time: 3.77s Step time: 3.88s\n", - "145888 Examples seen. Accuracy:0.8063 Error: 0.65016 Loss:0.74546 Threads: 8 Forward time: 4.94s Backward time: 3.74s Step time: 3.98s\n", - "146528 Examples seen. 
Accuracy:0.8090 Error: 0.49883 Loss:0.48952 Threads: 8 Forward time: 5.04s Backward time: 3.78s Step time: 3.88s\n", - "147168 Examples seen. Accuracy:0.8094 Error: 0.72466 Loss:0.94149 Threads: 8 Forward time: 4.99s Backward time: 3.75s Step time: 3.90s\n", - "147808 Examples seen. Accuracy:0.8101 Error: 0.43779 Loss:0.39046 Threads: 8 Forward time: 5.00s Backward time: 3.77s Step time: 3.90s\n", - "148448 Examples seen. Accuracy:0.8103 Error: 0.47540 Loss:0.51732 Threads: 8 Forward time: 5.01s Backward time: 3.73s Step time: 3.91s\n", - "149088 Examples seen. Accuracy:0.8125 Error: 0.61349 Loss:0.62172 Threads: 8 Forward time: 5.03s Backward time: 3.77s Step time: 3.91s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 3 Examples seen:149712 Validation Accuracy: 0.8145 Validation Error: 0.5479 Validation Loss: 0.6061 Total time: 17.52min\n", - "Epoch time: 5.1 minutes. 100 epochs: 8.5 hours.\n", - "Epochs: 3. Working time: 0.29 hours.\n", - "150352 Examples seen. Accuracy:0.8192 Error: 0.39216 Loss:0.43250 Threads: 8 Forward time: 5.00s Backward time: 3.74s Step time: 3.89s\n", - "150992 Examples seen. Accuracy:0.8173 Error: 0.50644 Loss:0.56953 Threads: 8 Forward time: 4.89s Backward time: 3.75s Step time: 3.86s\n", - "151632 Examples seen. Accuracy:0.8200 Error: 0.41418 Loss:0.52233 Threads: 8 Forward time: 4.90s Backward time: 3.72s Step time: 3.82s\n", - "152272 Examples seen. Accuracy:0.8235 Error: 0.45752 Loss:0.45199 Threads: 8 Forward time: 4.90s Backward time: 3.71s Step time: 3.82s\n", - "152912 Examples seen. Accuracy:0.8249 Error: 0.52087 Loss:0.82781 Threads: 8 Forward time: 4.91s Backward time: 3.72s Step time: 3.81s\n", - "153552 Examples seen. Accuracy:0.8240 Error: 0.53475 Loss:0.59838 Threads: 8 Forward time: 4.93s Backward time: 3.71s Step time: 3.82s\n", - "154192 Examples seen. 
Accuracy:0.8233 Error: 0.55490 Loss:0.79987 Threads: 8 Forward time: 4.92s Backward time: 3.68s Step time: 3.80s\n", - "154832 Examples seen. Accuracy:0.8238 Error: 0.58514 Loss:0.67710 Threads: 8 Forward time: 4.92s Backward time: 3.68s Step time: 3.82s\n", - "155472 Examples seen. Accuracy:0.8237 Error: 0.53179 Loss:0.49336 Threads: 8 Forward time: 4.96s Backward time: 3.73s Step time: 3.83s\n", - "156112 Examples seen. Accuracy:0.8234 Error: 0.44285 Loss:0.52057 Threads: 8 Forward time: 4.92s Backward time: 3.72s Step time: 3.84s\n", - "156752 Examples seen. Accuracy:0.8267 Error: 0.47392 Loss:0.62191 Threads: 8 Forward time: 4.93s Backward time: 3.76s Step time: 3.82s\n", - "157392 Examples seen. Accuracy:0.8227 Error: 0.60360 Loss:0.71726 Threads: 8 Forward time: 5.02s Backward time: 3.75s Step time: 3.84s\n", - "158032 Examples seen. Accuracy:0.8223 Error: 0.58812 Loss:0.65584 Threads: 8 Forward time: 4.92s Backward time: 3.71s Step time: 3.84s\n", - "158672 Examples seen. Accuracy:0.8214 Error: 0.50556 Loss:0.51089 Threads: 8 Forward time: 4.96s Backward time: 3.78s Step time: 3.83s\n", - "159312 Examples seen. Accuracy:0.8203 Error: 0.54778 Loss:0.63136 Threads: 8 Forward time: 4.91s Backward time: 3.71s Step time: 3.83s\n", - "159952 Examples seen. Accuracy:0.8208 Error: 0.52241 Loss:0.57624 Threads: 8 Forward time: 4.88s Backward time: 3.72s Step time: 3.82s\n", - "160592 Examples seen. Accuracy:0.8207 Error: 0.53627 Loss:0.59279 Threads: 8 Forward time: 4.96s Backward time: 3.74s Step time: 3.82s\n", - "161232 Examples seen. Accuracy:0.8193 Error: 0.60742 Loss:0.69559 Threads: 8 Forward time: 4.95s Backward time: 3.75s Step time: 3.82s\n", - "161872 Examples seen. Accuracy:0.8202 Error: 0.42806 Loss:0.45953 Threads: 8 Forward time: 4.89s Backward time: 3.75s Step time: 3.83s\n", - "162512 Examples seen. Accuracy:0.8204 Error: 0.35194 Loss:0.29541 Threads: 8 Forward time: 4.92s Backward time: 3.71s Step time: 3.81s\n", - "163152 Examples seen. 
Accuracy:0.8210 Error: 0.36569 Loss:0.32161 Threads: 8 Forward time: 4.89s Backward time: 3.73s Step time: 3.83s\n", - "163792 Examples seen. Accuracy:0.8209 Error: 0.54824 Loss:0.51071 Threads: 8 Forward time: 4.91s Backward time: 3.71s Step time: 3.80s\n", - "164432 Examples seen. Accuracy:0.8245 Error: 0.40903 Loss:0.52589 Threads: 8 Forward time: 4.89s Backward time: 3.71s Step time: 3.81s\n", - "165072 Examples seen. Accuracy:0.8250 Error: 0.47508 Loss:0.55595 Threads: 8 Forward time: 4.93s Backward time: 3.69s Step time: 3.82s\n", - "165712 Examples seen. Accuracy:0.8256 Error: 0.41772 Loss:0.44002 Threads: 8 Forward time: 4.98s Backward time: 3.74s Step time: 3.84s\n", - "166352 Examples seen. Accuracy:0.8258 Error: 0.55670 Loss:0.67870 Threads: 8 Forward time: 4.94s Backward time: 3.75s Step time: 3.86s\n", - "166992 Examples seen. Accuracy:0.8264 Error: 0.55651 Loss:0.66158 Threads: 8 Forward time: 4.97s Backward time: 3.74s Step time: 3.84s\n", - "167632 Examples seen. Accuracy:0.8291 Error: 0.38208 Loss:0.38273 Threads: 8 Forward time: 5.00s Backward time: 3.74s Step time: 3.86s\n", - "168272 Examples seen. Accuracy:0.8282 Error: 0.69786 Loss:0.82405 Threads: 8 Forward time: 4.98s Backward time: 3.73s Step time: 3.90s\n", - "168912 Examples seen. Accuracy:0.8275 Error: 0.47987 Loss:0.50884 Threads: 8 Forward time: 4.94s Backward time: 3.74s Step time: 3.90s\n", - "169552 Examples seen. Accuracy:0.8271 Error: 0.55535 Loss:0.57426 Threads: 8 Forward time: 4.94s Backward time: 3.72s Step time: 3.89s\n", - "170192 Examples seen. Accuracy:0.8283 Error: 0.59648 Loss:0.75638 Threads: 8 Forward time: 5.09s Backward time: 3.74s Step time: 3.91s\n", - "170832 Examples seen. Accuracy:0.8284 Error: 0.45573 Loss:0.40775 Threads: 8 Forward time: 4.97s Backward time: 3.75s Step time: 3.90s\n", - "171472 Examples seen. Accuracy:0.8276 Error: 0.66652 Loss:0.89765 Threads: 8 Forward time: 4.97s Backward time: 3.77s Step time: 3.90s\n", - "172112 Examples seen. 
Accuracy:0.8276 Error: 0.60761 Loss:0.80383 Threads: 8 Forward time: 4.98s Backward time: 3.74s Step time: 3.89s\n", - "172752 Examples seen. Accuracy:0.8279 Error: 0.33955 Loss:0.31424 Threads: 8 Forward time: 5.02s Backward time: 3.70s Step time: 3.89s\n", - "173392 Examples seen. Accuracy:0.8285 Error: 0.47791 Loss:0.57737 Threads: 8 Forward time: 5.01s Backward time: 3.70s Step time: 3.89s\n", - "174032 Examples seen. Accuracy:0.8277 Error: 0.56284 Loss:0.63809 Threads: 8 Forward time: 4.92s Backward time: 3.72s Step time: 4.27s\n", - "174672 Examples seen. Accuracy:0.8274 Error: 0.46905 Loss:0.63722 Threads: 8 Forward time: 4.92s Backward time: 3.72s Step time: 3.78s\n", - "175312 Examples seen. Accuracy:0.8308 Error: 0.40847 Loss:0.49759 Threads: 8 Forward time: 4.89s Backward time: 3.72s Step time: 3.78s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "175952 Examples seen. Accuracy:0.8299 Error: 0.49338 Loss:0.63578 Threads: 8 Forward time: 4.90s Backward time: 3.72s Step time: 3.78s\n", - "176592 Examples seen. Accuracy:0.8316 Error: 0.42440 Loss:0.40695 Threads: 8 Forward time: 4.91s Backward time: 3.72s Step time: 3.78s\n", - "177232 Examples seen. Accuracy:0.8304 Error: 0.47983 Loss:0.89970 Threads: 8 Forward time: 4.92s Backward time: 3.70s Step time: 3.78s\n", - "177872 Examples seen. Accuracy:0.8294 Error: 0.43695 Loss:0.46112 Threads: 8 Forward time: 4.93s Backward time: 3.68s Step time: 3.79s\n", - "178512 Examples seen. Accuracy:0.8313 Error: 0.46624 Loss:0.46886 Threads: 8 Forward time: 4.91s Backward time: 3.69s Step time: 3.77s\n", - "179152 Examples seen. Accuracy:0.8285 Error: 0.51522 Loss:0.62947 Threads: 8 Forward time: 4.90s Backward time: 3.68s Step time: 3.78s\n", - "179792 Examples seen. Accuracy:0.8291 Error: 0.49048 Loss:0.63040 Threads: 8 Forward time: 4.91s Backward time: 3.70s Step time: 3.77s\n", - "180432 Examples seen. 
Accuracy:0.8286 Error: 0.43082 Loss:0.45422 Threads: 8 Forward time: 4.88s Backward time: 3.66s Step time: 3.76s\n", - "181072 Examples seen. Accuracy:0.8300 Error: 0.26881 Loss:0.25330 Threads: 8 Forward time: 4.91s Backward time: 3.70s Step time: 3.77s\n", - "181712 Examples seen. Accuracy:0.8288 Error: 0.67797 Loss:0.73267 Threads: 8 Forward time: 4.90s Backward time: 3.68s Step time: 3.75s\n", - "182352 Examples seen. Accuracy:0.8250 Error: 0.43118 Loss:0.47404 Threads: 8 Forward time: 4.92s Backward time: 3.72s Step time: 3.79s\n", - "182992 Examples seen. Accuracy:0.8237 Error: 0.71151 Loss:0.96095 Threads: 8 Forward time: 4.97s Backward time: 3.65s Step time: 3.81s\n", - "183632 Examples seen. Accuracy:0.8236 Error: 0.51908 Loss:0.66391 Threads: 8 Forward time: 4.92s Backward time: 3.62s Step time: 3.79s\n", - "184272 Examples seen. Accuracy:0.8243 Error: 0.50184 Loss:0.55106 Threads: 8 Forward time: 4.88s Backward time: 3.65s Step time: 3.77s\n", - "184912 Examples seen. Accuracy:0.8263 Error: 0.36270 Loss:0.37580 Threads: 8 Forward time: 4.92s Backward time: 3.65s Step time: 3.75s\n", - "185552 Examples seen. Accuracy:0.8250 Error: 0.59277 Loss:0.83979 Threads: 8 Forward time: 8.22s Backward time: 5.47s Step time: 4.10s\n", - "186192 Examples seen. Accuracy:0.8270 Error: 0.41037 Loss:0.42193 Threads: 8 Forward time: 4.91s Backward time: 3.67s Step time: 3.79s\n", - "186832 Examples seen. Accuracy:0.8288 Error: 0.39750 Loss:0.36253 Threads: 8 Forward time: 4.92s Backward time: 3.67s Step time: 3.78s\n", - "187472 Examples seen. Accuracy:0.8324 Error: 0.38362 Loss:0.48083 Threads: 8 Forward time: 4.88s Backward time: 3.65s Step time: 3.79s\n", - "188112 Examples seen. Accuracy:0.8322 Error: 0.42722 Loss:0.44788 Threads: 8 Forward time: 4.92s Backward time: 3.61s Step time: 3.80s\n", - "188752 Examples seen. Accuracy:0.8327 Error: 0.38282 Loss:0.32241 Threads: 8 Forward time: 4.96s Backward time: 3.63s Step time: 3.77s\n", - "189392 Examples seen. 
Accuracy:0.8343 Error: 0.43995 Loss:0.48567 Threads: 8 Forward time: 4.92s Backward time: 3.63s Step time: 3.78s\n", - "190032 Examples seen. Accuracy:0.8346 Error: 0.40010 Loss:0.34967 Threads: 8 Forward time: 4.90s Backward time: 3.63s Step time: 3.77s\n", - "190672 Examples seen. Accuracy:0.8348 Error: 0.31902 Loss:0.36368 Threads: 8 Forward time: 4.91s Backward time: 3.64s Step time: 3.77s\n", - "191312 Examples seen. Accuracy:0.8367 Error: 0.38642 Loss:0.43329 Threads: 8 Forward time: 4.91s Backward time: 3.67s Step time: 3.79s\n", - "191952 Examples seen. Accuracy:0.8349 Error: 0.48097 Loss:0.70807 Threads: 8 Forward time: 4.90s Backward time: 3.65s Step time: 3.78s\n", - "192592 Examples seen. Accuracy:0.8365 Error: 0.35821 Loss:0.43316 Threads: 8 Forward time: 4.92s Backward time: 3.63s Step time: 3.79s\n", - "193232 Examples seen. Accuracy:0.8345 Error: 0.42352 Loss:0.42006 Threads: 8 Forward time: 4.94s Backward time: 3.58s Step time: 3.78s\n", - "193872 Examples seen. Accuracy:0.8347 Error: 0.47412 Loss:0.54623 Threads: 8 Forward time: 4.91s Backward time: 3.64s Step time: 3.77s\n", - "194512 Examples seen. Accuracy:0.8364 Error: 0.55156 Loss:0.53484 Threads: 8 Forward time: 5.10s Backward time: 3.69s Step time: 3.80s\n", - "195152 Examples seen. Accuracy:0.8372 Error: 0.50308 Loss:0.51425 Threads: 8 Forward time: 4.93s Backward time: 3.61s Step time: 3.81s\n", - "195792 Examples seen. Accuracy:0.8387 Error: 0.37102 Loss:0.31270 Threads: 8 Forward time: 5.04s Backward time: 3.68s Step time: 3.80s\n", - "196432 Examples seen. Accuracy:0.8386 Error: 0.35134 Loss:0.44519 Threads: 8 Forward time: 4.93s Backward time: 3.65s Step time: 3.79s\n", - "197072 Examples seen. Accuracy:0.8416 Error: 0.32214 Loss:0.37739 Threads: 8 Forward time: 4.94s Backward time: 3.62s Step time: 3.79s\n", - "197712 Examples seen. Accuracy:0.8406 Error: 0.34171 Loss:0.43089 Threads: 8 Forward time: 4.91s Backward time: 3.63s Step time: 3.78s\n", - "198352 Examples seen. 
Accuracy:0.8400 Error: 0.47906 Loss:0.63858 Threads: 8 Forward time: 4.91s Backward time: 3.63s Step time: 3.81s\n", - "198992 Examples seen. Accuracy:0.8406 Error: 0.41359 Loss:0.45117 Threads: 8 Forward time: 5.00s Backward time: 3.66s Step time: 3.84s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 4 Examples seen:199616 Validation Accuracy: 0.8388 Validation Error: 0.4966 Validation Loss: 0.5339 Total time: 22.97min\n", - "Epoch time: 5 minutes. 100 epochs: 8.3 hours.\n", - "Epochs: 4. Working time: 0.38 hours.\n", - "200256 Examples seen. Accuracy:0.8431 Error: 0.47344 Loss:0.42972 Threads: 8 Forward time: 4.89s Backward time: 3.66s Step time: 3.80s\n", - "200896 Examples seen. Accuracy:0.8464 Error: 0.31460 Loss:0.25577 Threads: 8 Forward time: 5.03s Backward time: 3.74s Step time: 3.82s\n", - "201536 Examples seen. Accuracy:0.8462 Error: 0.64650 Loss:0.69419 Threads: 8 Forward time: 4.92s Backward time: 3.64s Step time: 3.81s\n", - "202176 Examples seen. Accuracy:0.8464 Error: 0.36548 Loss:0.38364 Threads: 8 Forward time: 4.90s Backward time: 3.63s Step time: 3.82s\n", - "202816 Examples seen. Accuracy:0.8455 Error: 0.38595 Loss:0.40465 Threads: 8 Forward time: 4.96s Backward time: 3.61s Step time: 3.76s\n", - "203456 Examples seen. Accuracy:0.8436 Error: 0.40913 Loss:0.38274 Threads: 8 Forward time: 4.91s Backward time: 3.63s Step time: 3.78s\n", - "204096 Examples seen. Accuracy:0.8421 Error: 0.47723 Loss:0.51854 Threads: 8 Forward time: 4.90s Backward time: 3.60s Step time: 3.75s\n", - "204736 Examples seen. Accuracy:0.8417 Error: 0.42170 Loss:0.45629 Threads: 8 Forward time: 4.90s Backward time: 3.63s Step time: 3.78s\n", - "205376 Examples seen. Accuracy:0.8405 Error: 0.48133 Loss:0.55698 Threads: 8 Forward time: 4.92s Backward time: 3.62s Step time: 3.79s\n", - "206016 Examples seen. 
Accuracy:0.8415 Error: 0.51225 Loss:0.55395 Threads: 8 Forward time: 4.90s Backward time: 3.62s Step time: 3.78s\n", - "206656 Examples seen. Accuracy:0.8419 Error: 0.52573 Loss:0.54872 Threads: 8 Forward time: 4.91s Backward time: 3.61s Step time: 3.79s\n", - "207296 Examples seen. Accuracy:0.8407 Error: 0.46051 Loss:0.49051 Threads: 8 Forward time: 4.92s Backward time: 3.59s Step time: 3.79s\n", - "207936 Examples seen. Accuracy:0.8413 Error: 0.39777 Loss:0.45502 Threads: 8 Forward time: 4.90s Backward time: 3.59s Step time: 3.91s\n", - "208576 Examples seen. Accuracy:0.8424 Error: 0.35375 Loss:0.31353 Threads: 8 Forward time: 4.92s Backward time: 3.58s Step time: 3.84s\n", - "209216 Examples seen. Accuracy:0.8441 Error: 0.36028 Loss:0.41186 Threads: 8 Forward time: 4.91s Backward time: 3.59s Step time: 3.84s\n", - "209856 Examples seen. Accuracy:0.8442 Error: 0.46064 Loss:0.47827 Threads: 8 Forward time: 6.18s Backward time: 4.40s Step time: 4.22s\n", - "210496 Examples seen. Accuracy:0.8439 Error: 0.35342 Loss:0.30903 Threads: 8 Forward time: 4.94s Backward time: 3.66s Step time: 4.56s\n", - "211136 Examples seen. Accuracy:0.8428 Error: 0.43144 Loss:0.57266 Threads: 8 Forward time: 4.92s Backward time: 3.58s Step time: 3.75s\n", - "211776 Examples seen. Accuracy:0.8435 Error: 0.45110 Loss:0.39395 Threads: 8 Forward time: 4.92s Backward time: 3.58s Step time: 3.77s\n", - "212416 Examples seen. Accuracy:0.8431 Error: 0.29183 Loss:0.32461 Threads: 8 Forward time: 4.95s Backward time: 3.59s Step time: 3.76s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "213056 Examples seen. Accuracy:0.8425 Error: 0.44417 Loss:0.50224 Threads: 8 Forward time: 4.96s Backward time: 3.57s Step time: 3.76s\n", - "213696 Examples seen. Accuracy:0.8430 Error: 0.44293 Loss:0.53411 Threads: 8 Forward time: 4.92s Backward time: 3.55s Step time: 3.74s\n", - "214336 Examples seen. 
Accuracy:0.8440 Error: 0.66991 Loss:0.95960 Threads: 8 Forward time: 4.94s Backward time: 3.59s Step time: 3.74s\n", - "214976 Examples seen. Accuracy:0.8467 Error: 0.45656 Loss:0.44948 Threads: 8 Forward time: 4.93s Backward time: 3.56s Step time: 3.74s\n", - "215616 Examples seen. Accuracy:0.8465 Error: 0.41955 Loss:0.40876 Threads: 8 Forward time: 4.92s Backward time: 3.55s Step time: 3.73s\n", - "216256 Examples seen. Accuracy:0.8477 Error: 0.41120 Loss:0.44879 Threads: 8 Forward time: 4.93s Backward time: 3.52s Step time: 3.74s\n", - "216896 Examples seen. Accuracy:0.8490 Error: 0.33548 Loss:0.28479 Threads: 8 Forward time: 4.91s Backward time: 3.59s Step time: 3.76s\n", - "217536 Examples seen. Accuracy:0.8495 Error: 0.44548 Loss:0.53520 Threads: 8 Forward time: 4.92s Backward time: 3.60s Step time: 3.74s\n", - "218176 Examples seen. Accuracy:0.8495 Error: 0.42480 Loss:0.45789 Threads: 8 Forward time: 4.99s Backward time: 3.67s Step time: 3.76s\n", - "218816 Examples seen. Accuracy:0.8500 Error: 0.36274 Loss:0.32787 Threads: 8 Forward time: 5.00s Backward time: 3.60s Step time: 3.79s\n", - "219456 Examples seen. Accuracy:0.8476 Error: 0.46745 Loss:0.53919 Threads: 8 Forward time: 4.96s Backward time: 3.60s Step time: 3.76s\n", - "220096 Examples seen. Accuracy:0.8470 Error: 0.52822 Loss:0.65375 Threads: 8 Forward time: 4.93s Backward time: 3.56s Step time: 3.74s\n", - "220736 Examples seen. Accuracy:0.8463 Error: 0.38752 Loss:0.57444 Threads: 8 Forward time: 4.95s Backward time: 3.60s Step time: 3.75s\n", - "221376 Examples seen. Accuracy:0.8485 Error: 0.32242 Loss:0.40611 Threads: 8 Forward time: 4.94s Backward time: 3.59s Step time: 3.75s\n", - "222016 Examples seen. Accuracy:0.8511 Error: 0.36085 Loss:0.38120 Threads: 8 Forward time: 4.98s Backward time: 3.64s Step time: 3.76s\n", - "222656 Examples seen. Accuracy:0.8518 Error: 0.32258 Loss:0.28939 Threads: 8 Forward time: 4.94s Backward time: 3.58s Step time: 3.75s\n", - "223296 Examples seen. 
Accuracy:0.8504 Error: 0.54130 Loss:0.66737 Threads: 8 Forward time: 4.97s Backward time: 3.60s Step time: 3.77s\n", - "223936 Examples seen. Accuracy:0.8485 Error: 0.31742 Loss:0.31574 Threads: 8 Forward time: 4.96s Backward time: 3.56s Step time: 3.80s\n", - "224576 Examples seen. Accuracy:0.8506 Error: 0.41915 Loss:0.39697 Threads: 8 Forward time: 4.98s Backward time: 3.56s Step time: 3.76s\n", - "225216 Examples seen. Accuracy:0.8527 Error: 0.31546 Loss:0.30623 Threads: 8 Forward time: 5.04s Backward time: 3.60s Step time: 3.79s\n", - "225856 Examples seen. Accuracy:0.8511 Error: 0.46154 Loss:0.46196 Threads: 8 Forward time: 4.93s Backward time: 3.56s Step time: 3.78s\n", - "226496 Examples seen. Accuracy:0.8537 Error: 0.23920 Loss:0.17325 Threads: 8 Forward time: 5.03s Backward time: 3.58s Step time: 3.76s\n", - "227136 Examples seen. Accuracy:0.8552 Error: 0.35634 Loss:0.44460 Threads: 8 Forward time: 4.93s Backward time: 3.58s Step time: 4.35s\n", - "227776 Examples seen. Accuracy:0.8554 Error: 0.30531 Loss:0.25647 Threads: 8 Forward time: 4.91s Backward time: 3.57s Step time: 3.79s\n", - "228416 Examples seen. Accuracy:0.8555 Error: 0.30167 Loss:0.35479 Threads: 8 Forward time: 4.92s Backward time: 3.56s Step time: 3.79s\n", - "229056 Examples seen. Accuracy:0.8563 Error: 0.39529 Loss:0.40308 Threads: 8 Forward time: 5.01s Backward time: 3.60s Step time: 3.79s\n", - "229696 Examples seen. Accuracy:0.8571 Error: 0.48068 Loss:0.55256 Threads: 8 Forward time: 4.92s Backward time: 3.59s Step time: 3.80s\n", - "230336 Examples seen. Accuracy:0.8592 Error: 0.25813 Loss:0.29048 Threads: 8 Forward time: 4.93s Backward time: 3.58s Step time: 3.78s\n", - "230976 Examples seen. Accuracy:0.8586 Error: 0.32898 Loss:0.32639 Threads: 8 Forward time: 4.94s Backward time: 3.57s Step time: 3.79s\n", - "231616 Examples seen. Accuracy:0.8603 Error: 0.27744 Loss:0.30155 Threads: 8 Forward time: 4.93s Backward time: 3.54s Step time: 3.79s\n", - "232256 Examples seen. 
Accuracy:0.8605 Error: 0.38925 Loss:0.48196 Threads: 8 Forward time: 4.94s Backward time: 3.55s Step time: 3.79s\n", - "232896 Examples seen. Accuracy:0.8612 Error: 0.35306 Loss:0.36334 Threads: 8 Forward time: 4.92s Backward time: 3.57s Step time: 3.79s\n", - "233536 Examples seen. Accuracy:0.8614 Error: 0.30785 Loss:0.69586 Threads: 8 Forward time: 4.94s Backward time: 3.53s Step time: 3.80s\n", - "234176 Examples seen. Accuracy:0.8609 Error: 0.33963 Loss:0.37742 Threads: 8 Forward time: 4.93s Backward time: 3.60s Step time: 3.83s\n", - "234816 Examples seen. Accuracy:0.8608 Error: 0.41324 Loss:0.37596 Threads: 8 Forward time: 4.93s Backward time: 3.54s Step time: 3.81s\n", - "235456 Examples seen. Accuracy:0.8604 Error: 0.53449 Loss:0.65127 Threads: 8 Forward time: 4.96s Backward time: 3.55s Step time: 3.80s\n", - "236096 Examples seen. Accuracy:0.8581 Error: 0.34933 Loss:0.38058 Threads: 8 Forward time: 4.95s Backward time: 3.54s Step time: 3.87s\n", - "236736 Examples seen. Accuracy:0.8594 Error: 0.39007 Loss:0.39326 Threads: 8 Forward time: 5.02s Backward time: 3.55s Step time: 3.86s\n", - "237376 Examples seen. Accuracy:0.8580 Error: 0.46112 Loss:0.61066 Threads: 8 Forward time: 4.96s Backward time: 3.51s Step time: 3.76s\n", - "238016 Examples seen. Accuracy:0.8592 Error: 0.36977 Loss:0.41938 Threads: 8 Forward time: 4.96s Backward time: 3.54s Step time: 3.80s\n", - "238656 Examples seen. Accuracy:0.8589 Error: 0.34850 Loss:0.36165 Threads: 8 Forward time: 4.94s Backward time: 3.55s Step time: 3.81s\n", - "239296 Examples seen. Accuracy:0.8588 Error: 0.28783 Loss:0.24266 Threads: 8 Forward time: 4.94s Backward time: 3.55s Step time: 3.81s\n", - "239936 Examples seen. Accuracy:0.8589 Error: 0.43056 Loss:0.34941 Threads: 8 Forward time: 4.99s Backward time: 3.59s Step time: 3.81s\n", - "240576 Examples seen. Accuracy:0.8558 Error: 0.49845 Loss:0.62678 Threads: 8 Forward time: 5.50s Backward time: 3.89s Step time: 3.93s\n", - "241216 Examples seen. 
Accuracy:0.8551 Error: 0.27495 Loss:0.23386 Threads: 8 Forward time: 6.22s Backward time: 4.29s Step time: 5.39s\n", - "241856 Examples seen. Accuracy:0.8554 Error: 0.32550 Loss:0.33272 Threads: 8 Forward time: 5.54s Backward time: 3.94s Step time: 5.53s\n", - "242496 Examples seen. Accuracy:0.8577 Error: 0.39306 Loss:0.51455 Threads: 8 Forward time: 6.09s Backward time: 4.27s Step time: 4.71s\n", - "243136 Examples seen. Accuracy:0.8602 Error: 0.38005 Loss:0.45507 Threads: 8 Forward time: 6.67s Backward time: 4.69s Step time: 5.48s\n", - "243776 Examples seen. Accuracy:0.8616 Error: 0.30631 Loss:0.31517 Threads: 8 Forward time: 5.58s Backward time: 4.02s Step time: 5.39s\n", - "244416 Examples seen. Accuracy:0.8621 Error: 0.45326 Loss:0.48289 Threads: 8 Forward time: 6.05s Backward time: 4.18s Step time: 5.48s\n", - "245056 Examples seen. Accuracy:0.8606 Error: 0.45040 Loss:0.51158 Threads: 8 Forward time: 6.01s Backward time: 4.16s Step time: 5.62s\n", - "245696 Examples seen. Accuracy:0.8564 Error: 0.33490 Loss:0.30871 Threads: 8 Forward time: 6.69s Backward time: 4.64s Step time: 5.67s\n", - "246336 Examples seen. Accuracy:0.8551 Error: 0.47371 Loss:0.44888 Threads: 8 Forward time: 6.26s Backward time: 4.45s Step time: 5.47s\n", - "246976 Examples seen. Accuracy:0.8571 Error: 0.27831 Loss:0.27319 Threads: 8 Forward time: 5.62s Backward time: 3.95s Step time: 5.29s\n", - "247616 Examples seen. Accuracy:0.8572 Error: 0.24168 Loss:0.19918 Threads: 8 Forward time: 5.97s Backward time: 4.22s Step time: 5.21s\n", - "248256 Examples seen. Accuracy:0.8592 Error: 0.40101 Loss:0.47751 Threads: 8 Forward time: 5.83s Backward time: 4.04s Step time: 5.53s\n", - "248896 Examples seen. Accuracy:0.8572 Error: 0.49252 Loss:0.70675 Threads: 8 Forward time: 5.78s Backward time: 4.03s Step time: 5.25s\n", - "Starting Validation.\n", - "VALIDATION RECORD! 
Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 5 Examples seen:249520 Validation Accuracy: 0.8529 Validation Error: 0.4536 Validation Loss: 0.4699 Total time: 28.82min\n", - "Epoch time: 6.8 minutes. 100 epochs: 11 hours.\n", - "Epochs: 5. Working time: 0.48 hours.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "250160 Examples seen. Accuracy:0.8557 Error: 0.41416 Loss:0.61978 Threads: 8 Forward time: 6.00s Backward time: 4.18s Step time: 5.61s\n", - "250800 Examples seen. Accuracy:0.8554 Error: 0.47289 Loss:0.61539 Threads: 8 Forward time: 5.89s Backward time: 4.12s Step time: 5.41s\n", - "251440 Examples seen. Accuracy:0.8546 Error: 0.56265 Loss:0.61656 Threads: 8 Forward time: 6.12s Backward time: 4.34s Step time: 5.55s\n", - "252080 Examples seen. Accuracy:0.8535 Error: 0.40453 Loss:0.44082 Threads: 8 Forward time: 6.81s Backward time: 4.65s Step time: 5.44s\n", - "252720 Examples seen. Accuracy:0.8529 Error: 0.40592 Loss:0.45493 Threads: 8 Forward time: 6.13s Backward time: 4.28s Step time: 5.26s\n", - "253360 Examples seen. Accuracy:0.8540 Error: 0.34238 Loss:0.41759 Threads: 8 Forward time: 6.27s Backward time: 4.35s Step time: 5.52s\n", - "254000 Examples seen. Accuracy:0.8550 Error: 0.39450 Loss:0.40056 Threads: 8 Forward time: 5.68s Backward time: 4.04s Step time: 5.38s\n", - "254640 Examples seen. Accuracy:0.8573 Error: 0.49733 Loss:0.66362 Threads: 8 Forward time: 6.38s Backward time: 4.51s Step time: 5.67s\n", - "255280 Examples seen. Accuracy:0.8578 Error: 0.42118 Loss:0.43178 Threads: 8 Forward time: 5.71s Backward time: 4.07s Step time: 5.40s\n", - "255920 Examples seen. Accuracy:0.8561 Error: 0.54741 Loss:0.65239 Threads: 8 Forward time: 7.54s Backward time: 5.30s Step time: 5.40s\n", - "256560 Examples seen. Accuracy:0.8565 Error: 0.45397 Loss:0.45796 Threads: 8 Forward time: 6.71s Backward time: 4.68s Step time: 5.55s\n", - "257200 Examples seen. 
Accuracy:0.8600 Error: 0.35031 Loss:0.29598 Threads: 8 Forward time: 6.64s Backward time: 4.60s Step time: 5.31s\n", - "257840 Examples seen. Accuracy:0.8588 Error: 0.52212 Loss:0.54196 Threads: 8 Forward time: 5.80s Backward time: 3.94s Step time: 5.50s\n", - "258480 Examples seen. Accuracy:0.8585 Error: 0.33339 Loss:0.32437 Threads: 8 Forward time: 6.36s Backward time: 4.52s Step time: 5.53s\n", - "259120 Examples seen. Accuracy:0.8587 Error: 0.48267 Loss:0.75483 Threads: 8 Forward time: 6.12s Backward time: 4.23s Step time: 5.49s\n", - "259760 Examples seen. Accuracy:0.8601 Error: 0.37181 Loss:0.28587 Threads: 8 Forward time: 6.58s Backward time: 4.64s Step time: 5.53s\n", - "260400 Examples seen. Accuracy:0.8621 Error: 0.31563 Loss:0.40805 Threads: 8 Forward time: 6.36s Backward time: 4.42s Step time: 5.27s\n", - "261040 Examples seen. Accuracy:0.8622 Error: 0.33065 Loss:0.29644 Threads: 8 Forward time: 6.17s Backward time: 4.29s Step time: 5.18s\n", - "261680 Examples seen. Accuracy:0.8641 Error: 0.30131 Loss:0.29589 Threads: 8 Forward time: 6.20s Backward time: 4.41s Step time: 5.24s\n", - "262320 Examples seen. Accuracy:0.8663 Error: 0.21092 Loss:0.25346 Threads: 8 Forward time: 5.78s Backward time: 4.02s Step time: 5.01s\n", - "262960 Examples seen. Accuracy:0.8680 Error: 0.38215 Loss:0.33075 Threads: 8 Forward time: 5.40s Backward time: 3.77s Step time: 5.08s\n", - "263600 Examples seen. Accuracy:0.8682 Error: 0.26736 Loss:0.24997 Threads: 8 Forward time: 6.68s Backward time: 4.65s Step time: 5.49s\n", - "264240 Examples seen. Accuracy:0.8679 Error: 0.35094 Loss:0.32577 Threads: 8 Forward time: 6.54s Backward time: 4.44s Step time: 6.06s\n", - "264880 Examples seen. Accuracy:0.8682 Error: 0.47006 Loss:0.65329 Threads: 8 Forward time: 5.25s Backward time: 3.64s Step time: 4.91s\n", - "265520 Examples seen. Accuracy:0.8682 Error: 0.32088 Loss:0.31916 Threads: 8 Forward time: 5.68s Backward time: 3.98s Step time: 4.27s\n", - "266160 Examples seen. 
Accuracy:0.8689 Error: 0.29626 Loss:0.23896 Threads: 8 Forward time: 5.31s Backward time: 3.70s Step time: 4.93s\n", - "266800 Examples seen. Accuracy:0.8700 Error: 0.25193 Loss:0.37996 Threads: 8 Forward time: 5.42s Backward time: 3.81s Step time: 4.78s\n", - "267440 Examples seen. Accuracy:0.8717 Error: 0.21002 Loss:0.14987 Threads: 8 Forward time: 5.28s Backward time: 3.69s Step time: 4.89s\n", - "268080 Examples seen. Accuracy:0.8717 Error: 0.41192 Loss:0.49685 Threads: 8 Forward time: 5.69s Backward time: 4.05s Step time: 4.63s\n", - "268720 Examples seen. Accuracy:0.8699 Error: 0.37305 Loss:0.39862 Threads: 8 Forward time: 5.71s Backward time: 4.00s Step time: 4.73s\n", - "269360 Examples seen. Accuracy:0.8716 Error: 0.28204 Loss:0.28513 Threads: 8 Forward time: 5.11s Backward time: 3.56s Step time: 4.07s\n", - "270000 Examples seen. Accuracy:0.8714 Error: 0.32895 Loss:0.29585 Threads: 8 Forward time: 5.14s Backward time: 3.59s Step time: 4.38s\n", - "270640 Examples seen. Accuracy:0.8712 Error: 0.18401 Loss:0.21646 Threads: 8 Forward time: 4.99s Backward time: 3.54s Step time: 4.41s\n", - "271280 Examples seen. Accuracy:0.8693 Error: 0.43247 Loss:0.60597 Threads: 8 Forward time: 4.94s Backward time: 3.56s Step time: 3.79s\n", - "271920 Examples seen. Accuracy:0.8696 Error: 0.43727 Loss:0.66205 Threads: 8 Forward time: 4.94s Backward time: 3.56s Step time: 3.82s\n", - "272560 Examples seen. Accuracy:0.8690 Error: 0.35901 Loss:0.39837 Threads: 8 Forward time: 4.98s Backward time: 3.55s Step time: 3.80s\n", - "273200 Examples seen. Accuracy:0.8719 Error: 0.29738 Loss:0.32262 Threads: 8 Forward time: 4.96s Backward time: 3.55s Step time: 3.79s\n", - "273840 Examples seen. Accuracy:0.8691 Error: 0.33896 Loss:0.36114 Threads: 8 Forward time: 4.97s Backward time: 3.55s Step time: 3.83s\n", - "274480 Examples seen. Accuracy:0.8694 Error: 0.34602 Loss:0.38747 Threads: 8 Forward time: 4.98s Backward time: 3.54s Step time: 3.75s\n", - "275120 Examples seen. 
Accuracy:0.8720 Error: 0.30371 Loss:0.60516 Threads: 8 Forward time: 5.05s Backward time: 3.58s Step time: 3.77s\n", - "275760 Examples seen. Accuracy:0.8720 Error: 0.48180 Loss:0.56105 Threads: 8 Forward time: 4.92s Backward time: 3.54s Step time: 3.79s\n", - "276400 Examples seen. Accuracy:0.8686 Error: 0.42068 Loss:0.51996 Threads: 8 Forward time: 4.97s Backward time: 3.52s Step time: 3.78s\n", - "277040 Examples seen. Accuracy:0.8697 Error: 0.33638 Loss:0.36763 Threads: 8 Forward time: 4.93s Backward time: 3.54s Step time: 3.81s\n", - "277680 Examples seen. Accuracy:0.8679 Error: 0.40034 Loss:0.49190 Threads: 8 Forward time: 5.07s Backward time: 3.62s Step time: 3.81s\n", - "278320 Examples seen. Accuracy:0.8674 Error: 0.32358 Loss:0.38193 Threads: 8 Forward time: 4.97s Backward time: 3.56s Step time: 3.77s\n", - "278960 Examples seen. Accuracy:0.8693 Error: 0.26562 Loss:0.27574 Threads: 8 Forward time: 4.99s Backward time: 3.55s Step time: 3.89s\n", - "279600 Examples seen. Accuracy:0.8698 Error: 0.31484 Loss:0.38780 Threads: 8 Forward time: 4.98s Backward time: 3.55s Step time: 3.76s\n", - "280240 Examples seen. Accuracy:0.8715 Error: 0.30730 Loss:0.36770 Threads: 8 Forward time: 4.99s Backward time: 3.50s Step time: 3.77s\n", - "280880 Examples seen. Accuracy:0.8723 Error: 0.41658 Loss:0.51453 Threads: 8 Forward time: 4.98s Backward time: 3.49s Step time: 3.80s\n", - "281520 Examples seen. Accuracy:0.8715 Error: 0.44212 Loss:0.76795 Threads: 8 Forward time: 5.00s Backward time: 3.52s Step time: 3.86s\n", - "282160 Examples seen. Accuracy:0.8708 Error: 0.42520 Loss:0.37480 Threads: 8 Forward time: 4.95s Backward time: 3.46s Step time: 3.80s\n", - "282800 Examples seen. Accuracy:0.8709 Error: 0.45617 Loss:0.51872 Threads: 8 Forward time: 5.02s Backward time: 3.52s Step time: 3.93s\n", - "283440 Examples seen. Accuracy:0.8713 Error: 0.29606 Loss:0.44740 Threads: 8 Forward time: 4.97s Backward time: 3.49s Step time: 3.81s\n", - "284080 Examples seen. 
Accuracy:0.8748 Error: 0.35755 Loss:0.39252 Threads: 8 Forward time: 4.98s Backward time: 3.51s Step time: 3.75s\n", - "284720 Examples seen. Accuracy:0.8759 Error: 0.38344 Loss:0.34992 Threads: 8 Forward time: 4.93s Backward time: 3.54s Step time: 3.74s\n", - "285360 Examples seen. Accuracy:0.8758 Error: 0.29416 Loss:0.25656 Threads: 8 Forward time: 4.94s Backward time: 3.49s Step time: 3.73s\n", - "286000 Examples seen. Accuracy:0.8765 Error: 0.20789 Loss:0.15329 Threads: 8 Forward time: 4.97s Backward time: 3.51s Step time: 3.73s\n", - "286640 Examples seen. Accuracy:0.8745 Error: 0.37439 Loss:0.31489 Threads: 8 Forward time: 5.00s Backward time: 3.49s Step time: 3.75s\n", - "287280 Examples seen. Accuracy:0.8743 Error: 0.34163 Loss:0.42036 Threads: 8 Forward time: 4.92s Backward time: 3.49s Step time: 3.75s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "287920 Examples seen. Accuracy:0.8740 Error: 0.25792 Loss:0.27702 Threads: 8 Forward time: 4.93s Backward time: 3.47s Step time: 3.73s\n", - "288560 Examples seen. Accuracy:0.8731 Error: 0.36310 Loss:0.48028 Threads: 8 Forward time: 4.90s Backward time: 3.50s Step time: 3.76s\n", - "289200 Examples seen. Accuracy:0.8726 Error: 0.45299 Loss:0.42203 Threads: 8 Forward time: 4.95s Backward time: 3.49s Step time: 3.73s\n", - "289840 Examples seen. Accuracy:0.8694 Error: 0.47282 Loss:0.45889 Threads: 8 Forward time: 4.92s Backward time: 3.50s Step time: 3.72s\n", - "290480 Examples seen. Accuracy:0.8701 Error: 0.30834 Loss:0.31276 Threads: 8 Forward time: 4.93s Backward time: 3.51s Step time: 3.74s\n", - "291120 Examples seen. Accuracy:0.8710 Error: 0.23451 Loss:0.19223 Threads: 8 Forward time: 5.01s Backward time: 3.50s Step time: 3.79s\n", - "291760 Examples seen. Accuracy:0.8687 Error: 0.31808 Loss:0.35315 Threads: 8 Forward time: 5.00s Backward time: 3.50s Step time: 3.79s\n", - "292400 Examples seen. 
Accuracy:0.8686 Error: 0.37695 Loss:0.37042 Threads: 8 Forward time: 4.97s Backward time: 3.51s Step time: 3.76s\n", - "293040 Examples seen. Accuracy:0.8697 Error: 0.25772 Loss:0.22597 Threads: 8 Forward time: 5.02s Backward time: 3.52s Step time: 3.76s\n", - "293680 Examples seen. Accuracy:0.8702 Error: 0.31892 Loss:0.32680 Threads: 8 Forward time: 4.96s Backward time: 3.45s Step time: 3.76s\n", - "294320 Examples seen. Accuracy:0.8702 Error: 0.37245 Loss:0.38415 Threads: 8 Forward time: 4.96s Backward time: 3.48s Step time: 3.75s\n", - "294960 Examples seen. Accuracy:0.8707 Error: 0.24536 Loss:0.32072 Threads: 8 Forward time: 4.97s Backward time: 3.48s Step time: 3.74s\n", - "295600 Examples seen. Accuracy:0.8717 Error: 0.22107 Loss:0.17873 Threads: 8 Forward time: 5.03s Backward time: 3.52s Step time: 3.76s\n", - "296240 Examples seen. Accuracy:0.8715 Error: 0.31787 Loss:0.30973 Threads: 8 Forward time: 5.03s Backward time: 3.52s Step time: 3.82s\n", - "296880 Examples seen. Accuracy:0.8733 Error: 0.25571 Loss:0.25033 Threads: 8 Forward time: 5.10s Backward time: 3.50s Step time: 3.77s\n", - "297520 Examples seen. Accuracy:0.8724 Error: 0.43821 Loss:0.61105 Threads: 8 Forward time: 5.03s Backward time: 3.51s Step time: 3.79s\n", - "298160 Examples seen. Accuracy:0.8717 Error: 0.27836 Loss:0.20738 Threads: 8 Forward time: 5.05s Backward time: 3.48s Step time: 3.78s\n", - "298800 Examples seen. Accuracy:0.8724 Error: 0.33369 Loss:0.35276 Threads: 8 Forward time: 5.06s Backward time: 3.46s Step time: 3.77s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 6 Examples seen:299424 Validation Accuracy: 0.8750 Validation Error: 0.4079 Validation Loss: 0.4092 Total time: 34.99min\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.2 hours.\n", - "Epochs: 6. Working time: 0.58 hours.\n", - "300064 Examples seen. 
Accuracy:0.8730 Error: 0.44049 Loss:0.41471 Threads: 8 Forward time: 4.98s Backward time: 3.48s Step time: 3.91s\n", - "300704 Examples seen. Accuracy:0.8741 Error: 0.31149 Loss:0.36726 Threads: 8 Forward time: 4.95s Backward time: 3.46s Step time: 3.85s\n", - "301344 Examples seen. Accuracy:0.8768 Error: 0.39692 Loss:0.37729 Threads: 8 Forward time: 4.95s Backward time: 3.46s Step time: 3.84s\n", - "301984 Examples seen. Accuracy:0.8769 Error: 0.37734 Loss:0.37744 Threads: 8 Forward time: 4.94s Backward time: 3.48s Step time: 3.85s\n", - "302624 Examples seen. Accuracy:0.8744 Error: 0.33676 Loss:0.39071 Threads: 8 Forward time: 4.97s Backward time: 3.49s Step time: 3.88s\n", - "303264 Examples seen. Accuracy:0.8744 Error: 0.26232 Loss:0.31777 Threads: 8 Forward time: 4.96s Backward time: 3.49s Step time: 3.82s\n", - "303904 Examples seen. Accuracy:0.8756 Error: 0.17301 Loss:0.13140 Threads: 8 Forward time: 4.92s Backward time: 3.48s Step time: 3.75s\n", - "304544 Examples seen. Accuracy:0.8762 Error: 0.28372 Loss:0.30428 Threads: 8 Forward time: 4.95s Backward time: 3.52s Step time: 3.76s\n", - "305184 Examples seen. Accuracy:0.8781 Error: 0.31444 Loss:0.31657 Threads: 8 Forward time: 5.71s Backward time: 4.01s Step time: 4.24s\n", - "305824 Examples seen. Accuracy:0.8772 Error: 0.34488 Loss:0.36012 Threads: 8 Forward time: 4.93s Backward time: 3.51s Step time: 4.27s\n", - "306464 Examples seen. Accuracy:0.8762 Error: 0.34542 Loss:0.34030 Threads: 8 Forward time: 4.96s Backward time: 3.52s Step time: 3.77s\n", - "307104 Examples seen. Accuracy:0.8771 Error: 0.27357 Loss:0.25984 Threads: 8 Forward time: 4.99s Backward time: 3.49s Step time: 3.76s\n", - "307744 Examples seen. Accuracy:0.8772 Error: 0.28256 Loss:0.24896 Threads: 8 Forward time: 4.93s Backward time: 3.51s Step time: 3.75s\n", - "308384 Examples seen. Accuracy:0.8770 Error: 0.27555 Loss:0.32732 Threads: 8 Forward time: 4.93s Backward time: 3.47s Step time: 3.75s\n", - "309024 Examples seen. 
Accuracy:0.8782 Error: 0.26220 Loss:0.22220 Threads: 8 Forward time: 4.96s Backward time: 3.52s Step time: 3.77s\n", - "309664 Examples seen. Accuracy:0.8792 Error: 0.33832 Loss:0.34935 Threads: 8 Forward time: 4.99s Backward time: 3.49s Step time: 3.77s\n", - "310304 Examples seen. Accuracy:0.8800 Error: 0.19303 Loss:0.20110 Threads: 8 Forward time: 4.93s Backward time: 3.50s Step time: 3.75s\n", - "310944 Examples seen. Accuracy:0.8781 Error: 0.38871 Loss:0.46341 Threads: 8 Forward time: 4.95s Backward time: 3.53s Step time: 3.75s\n", - "311584 Examples seen. Accuracy:0.8768 Error: 0.37301 Loss:0.46918 Threads: 8 Forward time: 4.95s Backward time: 3.51s Step time: 3.76s\n", - "312224 Examples seen. Accuracy:0.8776 Error: 0.41793 Loss:0.55892 Threads: 8 Forward time: 4.92s Backward time: 3.49s Step time: 3.74s\n", - "312864 Examples seen. Accuracy:0.8786 Error: 0.36048 Loss:0.38111 Threads: 8 Forward time: 4.95s Backward time: 3.50s Step time: 3.75s\n", - "313504 Examples seen. Accuracy:0.8785 Error: 0.26750 Loss:0.34519 Threads: 8 Forward time: 4.97s Backward time: 3.48s Step time: 3.83s\n", - "314144 Examples seen. Accuracy:0.8767 Error: 0.30653 Loss:0.36306 Threads: 8 Forward time: 4.99s Backward time: 3.48s Step time: 3.79s\n", - "314784 Examples seen. Accuracy:0.8768 Error: 0.42513 Loss:0.39690 Threads: 8 Forward time: 5.04s Backward time: 3.47s Step time: 3.79s\n", - "315424 Examples seen. Accuracy:0.8790 Error: 0.30792 Loss:0.29558 Threads: 8 Forward time: 5.06s Backward time: 3.53s Step time: 3.82s\n", - "316064 Examples seen. Accuracy:0.8795 Error: 0.44585 Loss:0.50948 Threads: 8 Forward time: 4.99s Backward time: 3.52s Step time: 3.80s\n", - "316704 Examples seen. Accuracy:0.8807 Error: 0.23449 Loss:0.23607 Threads: 8 Forward time: 4.98s Backward time: 3.51s Step time: 3.82s\n", - "317344 Examples seen. Accuracy:0.8812 Error: 0.30097 Loss:0.28438 Threads: 8 Forward time: 4.99s Backward time: 3.48s Step time: 3.78s\n", - "317984 Examples seen. 
Accuracy:0.8814 Error: 0.29702 Loss:0.28903 Threads: 8 Forward time: 4.98s Backward time: 3.50s Step time: 3.80s\n", - "318624 Examples seen. Accuracy:0.8821 Error: 0.33107 Loss:0.27959 Threads: 8 Forward time: 4.94s Backward time: 3.47s Step time: 3.79s\n", - "319264 Examples seen. Accuracy:0.8834 Error: 0.38918 Loss:0.40117 Threads: 8 Forward time: 4.98s Backward time: 3.49s Step time: 3.75s\n", - "319904 Examples seen. Accuracy:0.8842 Error: 0.27450 Loss:0.28561 Threads: 8 Forward time: 4.96s Backward time: 3.49s Step time: 3.79s\n", - "320544 Examples seen. Accuracy:0.8824 Error: 0.35142 Loss:0.32862 Threads: 8 Forward time: 4.97s Backward time: 3.50s Step time: 3.79s\n", - "321184 Examples seen. Accuracy:0.8837 Error: 0.24816 Loss:0.20022 Threads: 8 Forward time: 5.02s Backward time: 3.46s Step time: 3.79s\n", - "321824 Examples seen. Accuracy:0.8837 Error: 0.37376 Loss:0.45675 Threads: 8 Forward time: 5.00s Backward time: 3.49s Step time: 3.79s\n", - "322464 Examples seen. Accuracy:0.8827 Error: 0.26662 Loss:0.26174 Threads: 8 Forward time: 5.01s Backward time: 3.48s Step time: 3.78s\n", - "323104 Examples seen. Accuracy:0.8848 Error: 0.21413 Loss:0.18100 Threads: 8 Forward time: 8.51s Backward time: 5.97s Step time: 4.40s\n", - "323744 Examples seen. Accuracy:0.8831 Error: 0.19351 Loss:0.13206 Threads: 8 Forward time: 4.94s Backward time: 3.49s Step time: 3.82s\n", - "324384 Examples seen. Accuracy:0.8825 Error: 0.31639 Loss:0.28280 Threads: 8 Forward time: 4.93s Backward time: 3.51s Step time: 3.77s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "325024 Examples seen. Accuracy:0.8833 Error: 0.32511 Loss:0.36948 Threads: 8 Forward time: 4.96s Backward time: 3.46s Step time: 3.79s\n", - "325664 Examples seen. Accuracy:0.8840 Error: 0.23351 Loss:0.25399 Threads: 8 Forward time: 4.92s Backward time: 3.48s Step time: 3.78s\n", - "326304 Examples seen. 
Accuracy:0.8851 Error: 0.26503 Loss:0.25498 Threads: 8 Forward time: 4.97s Backward time: 3.51s Step time: 3.77s\n", - "326944 Examples seen. Accuracy:0.8868 Error: 0.31091 Loss:0.31941 Threads: 8 Forward time: 4.97s Backward time: 3.54s Step time: 3.80s\n", - "327584 Examples seen. Accuracy:0.8863 Error: 0.42101 Loss:0.60087 Threads: 8 Forward time: 4.98s Backward time: 3.49s Step time: 3.79s\n", - "328224 Examples seen. Accuracy:0.8879 Error: 0.34348 Loss:0.28510 Threads: 8 Forward time: 4.97s Backward time: 3.50s Step time: 3.78s\n", - "328864 Examples seen. Accuracy:0.8876 Error: 0.33577 Loss:0.32055 Threads: 8 Forward time: 4.94s Backward time: 3.49s Step time: 3.78s\n", - "329504 Examples seen. Accuracy:0.8873 Error: 0.32396 Loss:0.34236 Threads: 8 Forward time: 4.93s Backward time: 3.51s Step time: 3.77s\n", - "330144 Examples seen. Accuracy:0.8874 Error: 0.42834 Loss:0.45800 Threads: 8 Forward time: 4.97s Backward time: 3.49s Step time: 3.78s\n", - "330784 Examples seen. Accuracy:0.8873 Error: 0.37089 Loss:0.38457 Threads: 8 Forward time: 4.99s Backward time: 3.47s Step time: 3.76s\n", - "331424 Examples seen. Accuracy:0.8857 Error: 0.38040 Loss:0.33412 Threads: 8 Forward time: 4.98s Backward time: 3.49s Step time: 3.78s\n", - "332064 Examples seen. Accuracy:0.8850 Error: 0.26981 Loss:0.24799 Threads: 8 Forward time: 4.96s Backward time: 3.49s Step time: 3.80s\n", - "332704 Examples seen. Accuracy:0.8858 Error: 0.22499 Loss:0.21251 Threads: 8 Forward time: 4.95s Backward time: 3.53s Step time: 3.80s\n", - "333344 Examples seen. Accuracy:0.8868 Error: 0.25093 Loss:0.27546 Threads: 8 Forward time: 5.08s Backward time: 3.61s Step time: 3.86s\n", - "333984 Examples seen. Accuracy:0.8864 Error: 0.40687 Loss:0.42739 Threads: 8 Forward time: 4.96s Backward time: 3.52s Step time: 3.82s\n", - "334624 Examples seen. Accuracy:0.8855 Error: 0.25305 Loss:0.40589 Threads: 8 Forward time: 4.93s Backward time: 3.46s Step time: 3.78s\n", - "335264 Examples seen. 
Accuracy:0.8855 Error: 0.23767 Loss:0.21024 Threads: 8 Forward time: 4.99s Backward time: 3.49s Step time: 3.76s\n", - "335904 Examples seen. Accuracy:0.8877 Error: 0.33541 Loss:0.31152 Threads: 8 Forward time: 4.96s Backward time: 3.47s Step time: 3.76s\n", - "336544 Examples seen. Accuracy:0.8883 Error: 0.28533 Loss:0.27548 Threads: 8 Forward time: 4.98s Backward time: 3.50s Step time: 3.77s\n", - "337184 Examples seen. Accuracy:0.8829 Error: 0.55519 Loss:0.72949 Threads: 8 Forward time: 4.97s Backward time: 3.51s Step time: 3.81s\n", - "337824 Examples seen. Accuracy:0.8799 Error: 0.40741 Loss:0.44673 Threads: 8 Forward time: 4.93s Backward time: 3.48s Step time: 3.76s\n", - "338464 Examples seen. Accuracy:0.8811 Error: 0.29507 Loss:0.34375 Threads: 8 Forward time: 4.99s Backward time: 3.51s Step time: 4.02s\n", - "339104 Examples seen. Accuracy:0.8836 Error: 0.30081 Loss:0.21952 Threads: 8 Forward time: 5.01s Backward time: 3.45s Step time: 3.87s\n", - "339744 Examples seen. Accuracy:0.8833 Error: 0.34809 Loss:0.30790 Threads: 8 Forward time: 4.91s Backward time: 3.49s Step time: 3.79s\n", - "340384 Examples seen. Accuracy:0.8839 Error: 0.31525 Loss:0.28637 Threads: 8 Forward time: 4.92s Backward time: 3.52s Step time: 3.79s\n", - "341024 Examples seen. Accuracy:0.8822 Error: 0.29187 Loss:0.27903 Threads: 8 Forward time: 4.97s Backward time: 3.50s Step time: 3.81s\n", - "341664 Examples seen. Accuracy:0.8819 Error: 0.28927 Loss:0.34736 Threads: 8 Forward time: 5.00s Backward time: 3.48s Step time: 3.81s\n", - "342304 Examples seen. Accuracy:0.8818 Error: 0.23394 Loss:0.24446 Threads: 8 Forward time: 5.03s Backward time: 3.54s Step time: 3.80s\n", - "342944 Examples seen. Accuracy:0.8816 Error: 0.30263 Loss:0.36698 Threads: 8 Forward time: 4.94s Backward time: 3.50s Step time: 3.79s\n", - "343584 Examples seen. Accuracy:0.8820 Error: 0.20980 Loss:0.20819 Threads: 8 Forward time: 4.94s Backward time: 3.49s Step time: 3.79s\n", - "344224 Examples seen. 
Accuracy:0.8826 Error: 0.28569 Loss:0.24812 Threads: 8 Forward time: 4.98s Backward time: 3.52s Step time: 3.79s\n", - "344864 Examples seen. Accuracy:0.8815 Error: 0.28951 Loss:0.29310 Threads: 8 Forward time: 5.04s Backward time: 3.48s Step time: 3.82s\n", - "345504 Examples seen. Accuracy:0.8808 Error: 0.41354 Loss:0.40160 Threads: 8 Forward time: 4.97s Backward time: 3.46s Step time: 3.81s\n", - "346144 Examples seen. Accuracy:0.8791 Error: 0.37657 Loss:0.36933 Threads: 8 Forward time: 4.97s Backward time: 3.49s Step time: 3.79s\n", - "346784 Examples seen. Accuracy:0.8766 Error: 0.29656 Loss:0.29073 Threads: 8 Forward time: 4.94s Backward time: 3.49s Step time: 3.79s\n", - "347424 Examples seen. Accuracy:0.8769 Error: 0.22626 Loss:0.19025 Threads: 8 Forward time: 4.97s Backward time: 3.45s Step time: 3.79s\n", - "348064 Examples seen. Accuracy:0.8776 Error: 0.27147 Loss:0.29989 Threads: 8 Forward time: 4.96s Backward time: 3.47s Step time: 3.79s\n", - "348704 Examples seen. Accuracy:0.8778 Error: 0.34234 Loss:0.36908 Threads: 8 Forward time: 4.95s Backward time: 3.49s Step time: 3.78s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 7 Examples seen:349328 Validation Accuracy: 0.8917 Validation Error: 0.3574 Validation Loss: 0.3466 Total time: 40.44min\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.2 hours.\n", - "Epochs: 7. Working time: 0.67 hours.\n", - "349968 Examples seen. Accuracy:0.8768 Error: 0.36863 Loss:0.35538 Threads: 8 Forward time: 4.95s Backward time: 3.50s Step time: 3.76s\n", - "350608 Examples seen. Accuracy:0.8772 Error: 0.34856 Loss:0.43196 Threads: 8 Forward time: 5.03s Backward time: 3.54s Step time: 3.77s\n", - "351248 Examples seen. Accuracy:0.8779 Error: 0.43606 Loss:0.58930 Threads: 8 Forward time: 5.02s Backward time: 3.49s Step time: 3.78s\n", - "351888 Examples seen. 
Accuracy:0.8800 Error: 0.45147 Loss:0.49937 Threads: 8 Forward time: 4.95s Backward time: 3.48s Step time: 3.77s\n", - "352528 Examples seen. Accuracy:0.8787 Error: 0.29451 Loss:0.27244 Threads: 8 Forward time: 4.98s Backward time: 3.46s Step time: 3.78s\n", - "353168 Examples seen. Accuracy:0.8802 Error: 0.16149 Loss:0.14533 Threads: 8 Forward time: 4.99s Backward time: 3.50s Step time: 3.79s\n", - "353808 Examples seen. Accuracy:0.8837 Error: 0.20755 Loss:0.26077 Threads: 8 Forward time: 4.99s Backward time: 3.49s Step time: 3.79s\n", - "354448 Examples seen. Accuracy:0.8835 Error: 0.20552 Loss:0.14693 Threads: 8 Forward time: 4.93s Backward time: 3.48s Step time: 3.78s\n", - "355088 Examples seen. Accuracy:0.8808 Error: 0.28900 Loss:0.27395 Threads: 8 Forward time: 4.97s Backward time: 3.49s Step time: 3.78s\n", - "355728 Examples seen. Accuracy:0.8814 Error: 0.26270 Loss:0.21000 Threads: 8 Forward time: 4.97s Backward time: 3.47s Step time: 3.79s\n", - "356368 Examples seen. Accuracy:0.8814 Error: 0.32122 Loss:0.31663 Threads: 8 Forward time: 4.91s Backward time: 3.47s Step time: 3.77s\n", - "357008 Examples seen. Accuracy:0.8820 Error: 0.25219 Loss:0.30103 Threads: 8 Forward time: 4.94s Backward time: 3.48s Step time: 3.75s\n", - "357648 Examples seen. Accuracy:0.8839 Error: 0.23050 Loss:0.25109 Threads: 8 Forward time: 4.96s Backward time: 3.52s Step time: 3.76s\n", - "358288 Examples seen. Accuracy:0.8843 Error: 0.19620 Loss:0.16388 Threads: 8 Forward time: 4.94s Backward time: 3.48s Step time: 3.76s\n", - "358928 Examples seen. Accuracy:0.8841 Error: 0.29312 Loss:0.29182 Threads: 8 Forward time: 4.95s Backward time: 3.50s Step time: 3.75s\n", - "359568 Examples seen. Accuracy:0.8838 Error: 0.21842 Loss:0.15144 Threads: 8 Forward time: 4.92s Backward time: 3.45s Step time: 3.75s\n", - "360208 Examples seen. Accuracy:0.8859 Error: 0.32965 Loss:0.30473 Threads: 8 Forward time: 4.97s Backward time: 3.50s Step time: 3.73s\n", - "360848 Examples seen. 
Accuracy:0.8880 Error: 0.18466 Loss:0.13168 Threads: 8 Forward time: 5.00s Backward time: 3.50s Step time: 3.88s\n", - "361488 Examples seen. Accuracy:0.8884 Error: 0.26196 Loss:0.22501 Threads: 8 Forward time: 4.99s Backward time: 3.49s Step time: 3.79s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "362128 Examples seen. Accuracy:0.8895 Error: 0.20811 Loss:0.25456 Threads: 8 Forward time: 4.92s Backward time: 3.48s Step time: 3.83s\n", - "362768 Examples seen. Accuracy:0.8919 Error: 0.10902 Loss:0.09427 Threads: 8 Forward time: 4.97s Backward time: 3.52s Step time: 3.75s\n", - "363408 Examples seen. Accuracy:0.8940 Error: 0.25671 Loss:0.21309 Threads: 8 Forward time: 4.93s Backward time: 3.46s Step time: 3.75s\n", - "364048 Examples seen. Accuracy:0.8925 Error: 0.38461 Loss:0.48285 Threads: 8 Forward time: 5.00s Backward time: 3.50s Step time: 3.75s\n", - "364688 Examples seen. Accuracy:0.8890 Error: 0.29568 Loss:0.42930 Threads: 8 Forward time: 4.94s Backward time: 3.48s Step time: 3.73s\n", - "365328 Examples seen. Accuracy:0.8895 Error: 0.21174 Loss:0.18697 Threads: 8 Forward time: 4.94s Backward time: 3.50s Step time: 3.71s\n", - "365968 Examples seen. Accuracy:0.8891 Error: 0.36499 Loss:0.52891 Threads: 8 Forward time: 4.93s Backward time: 3.48s Step time: 3.71s\n", - "366608 Examples seen. Accuracy:0.8908 Error: 0.25228 Loss:0.27931 Threads: 8 Forward time: 4.93s Backward time: 3.48s Step time: 3.71s\n", - "367248 Examples seen. Accuracy:0.8905 Error: 0.29385 Loss:0.28685 Threads: 8 Forward time: 4.95s Backward time: 3.47s Step time: 3.71s\n", - "367888 Examples seen. Accuracy:0.8910 Error: 0.32724 Loss:0.34549 Threads: 8 Forward time: 4.94s Backward time: 3.47s Step time: 3.72s\n", - "368528 Examples seen. Accuracy:0.8916 Error: 0.25854 Loss:0.32644 Threads: 8 Forward time: 5.06s Backward time: 3.53s Step time: 3.78s\n", - "369168 Examples seen. 
Accuracy:0.8927 Error: 0.16793 Loss:0.11426 Threads: 8 Forward time: 5.05s Backward time: 3.51s Step time: 3.82s\n", - "369808 Examples seen. Accuracy:0.8944 Error: 0.24239 Loss:0.18175 Threads: 8 Forward time: 4.97s Backward time: 3.52s Step time: 3.85s\n", - "370448 Examples seen. Accuracy:0.8929 Error: 0.31733 Loss:0.40820 Threads: 8 Forward time: 4.95s Backward time: 3.51s Step time: 3.80s\n", - "371088 Examples seen. Accuracy:0.8932 Error: 0.30357 Loss:0.25450 Threads: 8 Forward time: 4.96s Backward time: 3.47s Step time: 3.79s\n", - "371728 Examples seen. Accuracy:0.8934 Error: 0.21168 Loss:0.21406 Threads: 8 Forward time: 4.95s Backward time: 3.49s Step time: 3.85s\n", - "372368 Examples seen. Accuracy:0.8910 Error: 0.35418 Loss:0.44411 Threads: 8 Forward time: 4.99s Backward time: 3.48s Step time: 3.78s\n", - "373008 Examples seen. Accuracy:0.8920 Error: 0.36992 Loss:0.37239 Threads: 8 Forward time: 4.99s Backward time: 3.47s Step time: 3.74s\n", - "373648 Examples seen. Accuracy:0.8896 Error: 0.30363 Loss:0.29197 Threads: 8 Forward time: 4.97s Backward time: 3.46s Step time: 3.75s\n", - "374288 Examples seen. Accuracy:0.8892 Error: 0.18495 Loss:0.15381 Threads: 8 Forward time: 5.04s Backward time: 3.51s Step time: 3.80s\n", - "374928 Examples seen. Accuracy:0.8876 Error: 0.50349 Loss:0.63044 Threads: 8 Forward time: 5.02s Backward time: 3.46s Step time: 3.78s\n", - "375568 Examples seen. Accuracy:0.8877 Error: 0.28011 Loss:0.36740 Threads: 8 Forward time: 4.98s Backward time: 3.43s Step time: 3.77s\n", - "376208 Examples seen. Accuracy:0.8877 Error: 0.28996 Loss:0.28570 Threads: 8 Forward time: 5.08s Backward time: 3.46s Step time: 3.78s\n", - "376848 Examples seen. Accuracy:0.8889 Error: 0.42555 Loss:0.44836 Threads: 8 Forward time: 5.01s Backward time: 3.43s Step time: 4.22s\n", - "377488 Examples seen. Accuracy:0.8912 Error: 0.19588 Loss:0.16835 Threads: 8 Forward time: 4.91s Backward time: 3.47s Step time: 3.71s\n", - "378128 Examples seen. 
Accuracy:0.8921 Error: 0.25972 Loss:0.27612 Threads: 8 Forward time: 4.93s Backward time: 3.46s Step time: 3.74s\n", - "378768 Examples seen. Accuracy:0.8925 Error: 0.24128 Loss:0.20132 Threads: 8 Forward time: 4.95s Backward time: 3.47s Step time: 3.72s\n", - "379408 Examples seen. Accuracy:0.8941 Error: 0.28113 Loss:0.22375 Threads: 8 Forward time: 4.91s Backward time: 3.46s Step time: 3.75s\n", - "380048 Examples seen. Accuracy:0.8960 Error: 0.18650 Loss:0.13804 Threads: 8 Forward time: 4.93s Backward time: 3.44s Step time: 3.73s\n", - "380688 Examples seen. Accuracy:0.8948 Error: 0.31853 Loss:0.75931 Threads: 8 Forward time: 4.90s Backward time: 3.42s Step time: 3.72s\n", - "381328 Examples seen. Accuracy:0.8962 Error: 0.24520 Loss:0.22383 Threads: 8 Forward time: 4.97s Backward time: 3.45s Step time: 3.73s\n", - "381968 Examples seen. Accuracy:0.8979 Error: 0.28176 Loss:0.27597 Threads: 8 Forward time: 4.93s Backward time: 3.45s Step time: 3.73s\n", - "382608 Examples seen. Accuracy:0.8947 Error: 0.41118 Loss:0.44400 Threads: 8 Forward time: 4.91s Backward time: 3.50s Step time: 3.74s\n", - "383248 Examples seen. Accuracy:0.8955 Error: 0.28678 Loss:0.27215 Threads: 8 Forward time: 4.90s Backward time: 3.40s Step time: 3.73s\n", - "383888 Examples seen. Accuracy:0.8973 Error: 0.36359 Loss:0.44842 Threads: 8 Forward time: 4.93s Backward time: 3.46s Step time: 3.72s\n", - "384528 Examples seen. Accuracy:0.8990 Error: 0.24468 Loss:0.34310 Threads: 8 Forward time: 4.92s Backward time: 3.42s Step time: 3.73s\n", - "385168 Examples seen. Accuracy:0.8995 Error: 0.10447 Loss:0.07142 Threads: 8 Forward time: 4.90s Backward time: 3.46s Step time: 3.71s\n", - "385808 Examples seen. Accuracy:0.8988 Error: 0.24809 Loss:0.31383 Threads: 8 Forward time: 4.92s Backward time: 3.46s Step time: 3.72s\n", - "386448 Examples seen. Accuracy:0.8986 Error: 0.21520 Loss:0.16863 Threads: 8 Forward time: 4.92s Backward time: 3.45s Step time: 3.71s\n", - "387088 Examples seen. 
Accuracy:0.8983 Error: 0.13757 Loss:0.09858 Threads: 8 Forward time: 4.89s Backward time: 3.44s Step time: 3.71s\n", - "387728 Examples seen. Accuracy:0.8975 Error: 0.32179 Loss:0.31074 Threads: 8 Forward time: 4.95s Backward time: 3.44s Step time: 3.81s\n", - "388368 Examples seen. Accuracy:0.8976 Error: 0.19826 Loss:0.21246 Threads: 8 Forward time: 5.00s Backward time: 3.48s Step time: 3.76s\n", - "389008 Examples seen. Accuracy:0.8968 Error: 0.34311 Loss:0.30443 Threads: 8 Forward time: 4.90s Backward time: 3.45s Step time: 3.73s\n", - "389648 Examples seen. Accuracy:0.8958 Error: 0.30185 Loss:0.28582 Threads: 8 Forward time: 5.11s Backward time: 3.48s Step time: 3.77s\n", - "390288 Examples seen. Accuracy:0.8972 Error: 0.35667 Loss:0.33354 Threads: 8 Forward time: 4.95s Backward time: 3.42s Step time: 3.74s\n", - "390928 Examples seen. Accuracy:0.8974 Error: 0.37074 Loss:0.46821 Threads: 8 Forward time: 4.93s Backward time: 3.46s Step time: 3.73s\n", - "391568 Examples seen. Accuracy:0.8991 Error: 0.29196 Loss:0.26430 Threads: 8 Forward time: 4.98s Backward time: 3.52s Step time: 3.72s\n", - "392208 Examples seen. Accuracy:0.9002 Error: 0.24413 Loss:0.40399 Threads: 8 Forward time: 4.90s Backward time: 3.46s Step time: 3.72s\n", - "392848 Examples seen. Accuracy:0.8976 Error: 0.27860 Loss:0.28463 Threads: 8 Forward time: 4.94s Backward time: 3.47s Step time: 3.73s\n", - "393488 Examples seen. Accuracy:0.8976 Error: 0.25641 Loss:0.25788 Threads: 8 Forward time: 4.91s Backward time: 3.44s Step time: 3.71s\n", - "394128 Examples seen. Accuracy:0.8978 Error: 0.25440 Loss:0.22948 Threads: 8 Forward time: 4.92s Backward time: 3.44s Step time: 3.72s\n", - "394768 Examples seen. Accuracy:0.8961 Error: 0.16535 Loss:0.13436 Threads: 8 Forward time: 4.92s Backward time: 3.42s Step time: 3.72s\n", - "395408 Examples seen. Accuracy:0.8959 Error: 0.30948 Loss:0.34091 Threads: 8 Forward time: 4.93s Backward time: 3.45s Step time: 3.73s\n", - "396048 Examples seen. 
Accuracy:0.8958 Error: 0.09142 Loss:0.05720 Threads: 8 Forward time: 4.92s Backward time: 3.44s Step time: 3.72s\n", - "396688 Examples seen. Accuracy:0.8944 Error: 0.30728 Loss:0.35313 Threads: 8 Forward time: 4.93s Backward time: 3.41s Step time: 3.71s\n", - "397328 Examples seen. Accuracy:0.8956 Error: 0.37548 Loss:0.54506 Threads: 8 Forward time: 4.95s Backward time: 3.45s Step time: 3.71s\n", - "397968 Examples seen. Accuracy:0.8949 Error: 0.21717 Loss:0.16693 Threads: 8 Forward time: 5.00s Backward time: 3.43s Step time: 3.73s\n", - "398608 Examples seen. Accuracy:0.8962 Error: 0.20949 Loss:0.20576 Threads: 8 Forward time: 5.06s Backward time: 3.45s Step time: 3.77s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epochs: 8 Examples seen:399232 Validation Accuracy: 0.9105 Validation Error: 0.3023 Validation Loss: 0.2852 Total time: 45.80min\n", - "Layer 0 Max Output: 1.266 Min Output: -2.000 TNNetInput 128,128,3 Times: 0.00s 0.00s\n", - "Layer 1 Neurons: 64 Max Weight: 0.265 Min Weight: -0.262 Max Output: 3.990 Min Output: -4.138 TNNetConvolutionLinear 66,66,64 Times: 8.53s 0.40s Parent:0\n", - "Layer 2 Max Output: 3.990 Min Output: -2.102 TNNetMaxPool 33,33,64 Times: 3.61s 0.06s Parent:1\n", - "Layer 3 Neurons: 1 Max Weight: 0.620 Min Weight: 0.282 Max Output: 5.977 Min Output: -3.842 TNNetMovingStdNormalization 33,33,64 Times: 0.28s 0.00s Parent:2\n", - "Layer 4 Neurons: 64 Max Weight: 0.147 Min Weight: -0.143 Max Output: 6.271 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.85s 0.51s Parent:3\n", - "Layer 5 Neurons: 64 Max Weight: 0.246 Min Weight: -0.229 Max Output: 10.426 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.86s 0.18s Parent:4\n", - "Layer 6 Max Output: 10.426 Min Output: 0.000 TNNetMaxPool 17,17,64 Times: 0.51s 0.02s Parent:5\n", - "Layer 7 Neurons: 64 Max Weight: 0.156 Min Weight: -0.170 Max Output: 
4.913 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.47s 0.09s Parent:6\n", - "Layer 8 Neurons: 64 Max Weight: 0.163 Min Weight: -0.145 Max Output: 3.961 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.46s 0.09s Parent:7\n", - "Layer 9 Neurons: 64 Max Weight: 0.168 Min Weight: -0.156 Max Output: 5.379 Min Output: 0.000 TNNetConvolutionReLU 9,9,64 Times: 0.42s 0.02s Parent:8\n", - "Layer 10 Max Output: 5.379 Min Output: 0.000 TNNetDropout 9,9,64 Times: 0.01s 0.00s Parent:9\n", - "Layer 11 Max Output: 5.379 Min Output: 0.000 TNNetMaxPool 5,5,64 Times: 0.05s 0.00s Parent:10\n", - "Layer 12 Neurons: 39 Max Weight: 0.291 Min Weight: -0.236 Max Output: 11.969 Min Output: -7.291 TNNetFullConnectLinear 39,1,1 Times: 0.04s 0.00s Parent:11\n", - "Layer 13 Max Output: 0.978 Min Output: 0.000 TNNetSoftMax 39,1,1 Times: 0.00s 0.00s Parent:12\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.2 hours.\n", - "Epochs: 8. Working time: 0.76 hours.\n", - "399872 Examples seen. Accuracy:0.8994 Error: 0.21442 Loss:0.16581 Threads: 8 Forward time: 4.99s Backward time: 3.44s Step time: 3.75s\n", - "400512 Examples seen. Accuracy:0.9003 Error: 0.27726 Loss:0.27547 Threads: 8 Forward time: 4.98s Backward time: 3.43s Step time: 3.76s\n", - "401152 Examples seen. Accuracy:0.8994 Error: 0.31509 Loss:0.43434 Threads: 8 Forward time: 5.04s Backward time: 3.50s Step time: 3.76s\n", - "401792 Examples seen. Accuracy:0.8993 Error: 0.33578 Loss:0.28914 Threads: 8 Forward time: 4.99s Backward time: 3.42s Step time: 4.39s\n", - "402432 Examples seen. Accuracy:0.9006 Error: 0.16803 Loss:0.12056 Threads: 8 Forward time: 4.98s Backward time: 3.46s Step time: 3.72s\n", - "403072 Examples seen. Accuracy:0.9020 Error: 0.15076 Loss:0.11473 Threads: 8 Forward time: 4.94s Backward time: 3.45s Step time: 3.78s\n", - "403712 Examples seen. Accuracy:0.9025 Error: 0.26013 Loss:0.26911 Threads: 8 Forward time: 4.94s Backward time: 3.42s Step time: 3.73s\n", - "404352 Examples seen. 
Accuracy:0.9032 Error: 0.19309 Loss:0.15420 Threads: 8 Forward time: 4.96s Backward time: 3.40s Step time: 3.78s\n", - "404992 Examples seen. Accuracy:0.9044 Error: 0.16549 Loss:0.19730 Threads: 8 Forward time: 4.93s Backward time: 3.43s Step time: 3.72s\n", - "405632 Examples seen. Accuracy:0.9038 Error: 0.24984 Loss:0.28750 Threads: 8 Forward time: 5.03s Backward time: 3.48s Step time: 3.83s\n", - "406272 Examples seen. Accuracy:0.9033 Error: 0.34040 Loss:0.44361 Threads: 8 Forward time: 4.91s Backward time: 3.44s Step time: 3.75s\n", - "406912 Examples seen. Accuracy:0.9013 Error: 0.27724 Loss:0.32900 Threads: 8 Forward time: 5.00s Backward time: 3.43s Step time: 3.73s\n", - "407552 Examples seen. Accuracy:0.9017 Error: 0.29230 Loss:0.34874 Threads: 8 Forward time: 4.94s Backward time: 3.45s Step time: 3.74s\n", - "408192 Examples seen. Accuracy:0.9002 Error: 0.37263 Loss:0.48775 Threads: 8 Forward time: 4.94s Backward time: 3.45s Step time: 3.79s\n", - "408832 Examples seen. Accuracy:0.9001 Error: 0.34247 Loss:0.35019 Threads: 8 Forward time: 4.96s Backward time: 3.45s Step time: 3.73s\n", - "409472 Examples seen. Accuracy:0.8991 Error: 0.26548 Loss:0.26771 Threads: 8 Forward time: 4.99s Backward time: 3.44s Step time: 3.86s\n", - "410112 Examples seen. Accuracy:0.8981 Error: 0.27243 Loss:0.29595 Threads: 8 Forward time: 4.97s Backward time: 3.47s Step time: 3.74s\n", - "410752 Examples seen. Accuracy:0.8965 Error: 0.26792 Loss:0.21175 Threads: 8 Forward time: 4.93s Backward time: 3.43s Step time: 3.76s\n", - "411392 Examples seen. Accuracy:0.8994 Error: 0.13233 Loss:0.08746 Threads: 8 Forward time: 4.97s Backward time: 3.44s Step time: 3.73s\n", - "412032 Examples seen. Accuracy:0.8994 Error: 0.24668 Loss:0.29498 Threads: 8 Forward time: 4.95s Backward time: 3.43s Step time: 3.81s\n", - "412672 Examples seen. Accuracy:0.8997 Error: 0.40232 Loss:0.56119 Threads: 8 Forward time: 4.92s Backward time: 3.44s Step time: 3.74s\n", - "413312 Examples seen. 
Accuracy:0.8988 Error: 0.36374 Loss:0.50164 Threads: 8 Forward time: 5.05s Backward time: 3.45s Step time: 3.85s\n", - "413952 Examples seen. Accuracy:0.8990 Error: 0.27126 Loss:0.31077 Threads: 8 Forward time: 4.99s Backward time: 3.44s Step time: 3.83s\n", - "414592 Examples seen. Accuracy:0.8972 Error: 0.24241 Loss:0.22294 Threads: 8 Forward time: 4.99s Backward time: 3.43s Step time: 3.80s\n", - "415232 Examples seen. Accuracy:0.8982 Error: 0.25018 Loss:0.23907 Threads: 8 Forward time: 4.95s Backward time: 3.44s Step time: 3.82s\n", - "415872 Examples seen. Accuracy:0.8977 Error: 0.42614 Loss:0.54798 Threads: 8 Forward time: 4.94s Backward time: 3.44s Step time: 3.88s\n", - "416512 Examples seen. Accuracy:0.8982 Error: 0.30717 Loss:0.29425 Threads: 8 Forward time: 5.03s Backward time: 3.44s Step time: 3.84s\n", - "417152 Examples seen. Accuracy:0.8976 Error: 0.27737 Loss:0.28189 Threads: 8 Forward time: 5.17s Backward time: 3.50s Step time: 3.86s\n", - "417792 Examples seen. Accuracy:0.8999 Error: 0.37017 Loss:0.34436 Threads: 8 Forward time: 4.93s Backward time: 3.43s Step time: 3.88s\n", - "418432 Examples seen. Accuracy:0.8997 Error: 0.24142 Loss:0.22316 Threads: 8 Forward time: 4.97s Backward time: 3.46s Step time: 3.82s\n", - "419072 Examples seen. Accuracy:0.8996 Error: 0.29826 Loss:0.25407 Threads: 8 Forward time: 4.91s Backward time: 3.41s Step time: 3.74s\n", - "419712 Examples seen. Accuracy:0.8999 Error: 0.26656 Loss:0.25406 Threads: 8 Forward time: 4.91s Backward time: 3.41s Step time: 3.72s\n", - "420352 Examples seen. Accuracy:0.9016 Error: 0.30254 Loss:0.31285 Threads: 8 Forward time: 4.92s Backward time: 3.41s Step time: 3.70s\n", - "420992 Examples seen. Accuracy:0.9012 Error: 0.34374 Loss:0.37865 Threads: 8 Forward time: 4.96s Backward time: 3.41s Step time: 3.73s\n", - "421632 Examples seen. Accuracy:0.9007 Error: 0.29218 Loss:0.31859 Threads: 8 Forward time: 4.96s Backward time: 3.41s Step time: 3.72s\n", - "422272 Examples seen. 
Accuracy:0.9022 Error: 0.28574 Loss:0.28517 Threads: 8 Forward time: 5.01s Backward time: 3.41s Step time: 3.74s\n", - "422912 Examples seen. Accuracy:0.9028 Error: 0.33697 Loss:0.40628 Threads: 8 Forward time: 5.01s Backward time: 3.44s Step time: 3.77s\n", - "423552 Examples seen. Accuracy:0.9031 Error: 0.27130 Loss:0.24747 Threads: 8 Forward time: 4.95s Backward time: 3.45s Step time: 3.73s\n", - "424192 Examples seen. Accuracy:0.9050 Error: 0.22315 Loss:0.23372 Threads: 8 Forward time: 4.95s Backward time: 3.42s Step time: 3.71s\n", - "424832 Examples seen. Accuracy:0.9050 Error: 0.30035 Loss:0.25561 Threads: 8 Forward time: 4.95s Backward time: 3.44s Step time: 3.70s\n", - "425472 Examples seen. Accuracy:0.9050 Error: 0.28654 Loss:0.35782 Threads: 8 Forward time: 4.93s Backward time: 3.44s Step time: 3.70s\n", - "426112 Examples seen. Accuracy:0.9046 Error: 0.30567 Loss:0.26560 Threads: 8 Forward time: 4.92s Backward time: 3.43s Step time: 3.69s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "426752 Examples seen. Accuracy:0.9069 Error: 0.25852 Loss:0.25752 Threads: 8 Forward time: 5.00s Backward time: 3.45s Step time: 3.69s\n", - "427392 Examples seen. Accuracy:0.9069 Error: 0.11511 Loss:0.10765 Threads: 8 Forward time: 4.98s Backward time: 3.45s Step time: 3.75s\n", - "428032 Examples seen. Accuracy:0.9068 Error: 0.26568 Loss:0.31001 Threads: 8 Forward time: 5.02s Backward time: 3.47s Step time: 3.73s\n", - "428672 Examples seen. Accuracy:0.9069 Error: 0.26464 Loss:0.19554 Threads: 8 Forward time: 5.00s Backward time: 3.44s Step time: 3.74s\n", - "429312 Examples seen. Accuracy:0.9045 Error: 0.35197 Loss:0.42663 Threads: 8 Forward time: 4.99s Backward time: 3.46s Step time: 3.73s\n", - "429952 Examples seen. Accuracy:0.9037 Error: 0.40357 Loss:0.45106 Threads: 8 Forward time: 4.96s Backward time: 3.42s Step time: 3.73s\n", - "430592 Examples seen. 
Accuracy:0.9030 Error: 0.47551 Loss:0.59034 Threads: 8 Forward time: 5.04s Backward time: 3.42s Step time: 3.72s\n", - "431232 Examples seen. Accuracy:0.9040 Error: 0.21036 Loss:0.18538 Threads: 8 Forward time: 4.94s Backward time: 3.39s Step time: 4.25s\n", - "431872 Examples seen. Accuracy:0.9044 Error: 0.24429 Loss:0.35991 Threads: 8 Forward time: 4.91s Backward time: 3.44s Step time: 3.70s\n", - "432512 Examples seen. Accuracy:0.9046 Error: 0.19176 Loss:0.18209 Threads: 8 Forward time: 4.96s Backward time: 3.44s Step time: 3.74s\n", - "433152 Examples seen. Accuracy:0.9030 Error: 0.23679 Loss:0.22339 Threads: 8 Forward time: 5.03s Backward time: 3.44s Step time: 3.75s\n", - "433792 Examples seen. Accuracy:0.9009 Error: 0.22331 Loss:0.20803 Threads: 8 Forward time: 4.96s Backward time: 3.44s Step time: 3.76s\n", - "434432 Examples seen. Accuracy:0.8985 Error: 0.28498 Loss:0.25923 Threads: 8 Forward time: 4.94s Backward time: 3.43s Step time: 3.75s\n", - "435072 Examples seen. Accuracy:0.9009 Error: 0.17313 Loss:0.14283 Threads: 8 Forward time: 4.91s Backward time: 3.44s Step time: 3.75s\n", - "435712 Examples seen. Accuracy:0.9019 Error: 0.10770 Loss:0.07412 Threads: 8 Forward time: 4.98s Backward time: 3.45s Step time: 3.74s\n", - "436352 Examples seen. Accuracy:0.9031 Error: 0.17378 Loss:0.13147 Threads: 8 Forward time: 4.96s Backward time: 3.44s Step time: 3.79s\n", - "436992 Examples seen. Accuracy:0.9054 Error: 0.22728 Loss:0.22468 Threads: 8 Forward time: 4.96s Backward time: 3.45s Step time: 3.74s\n", - "437632 Examples seen. Accuracy:0.9045 Error: 0.26207 Loss:0.23495 Threads: 8 Forward time: 4.94s Backward time: 3.46s Step time: 3.73s\n", - "438272 Examples seen. Accuracy:0.9071 Error: 0.25478 Loss:0.27596 Threads: 8 Forward time: 4.93s Backward time: 3.45s Step time: 3.77s\n", - "438912 Examples seen. Accuracy:0.9066 Error: 0.31478 Loss:0.37640 Threads: 8 Forward time: 5.08s Backward time: 3.45s Step time: 3.75s\n", - "439552 Examples seen. 
Accuracy:0.9073 Error: 0.28200 Loss:0.28881 Threads: 8 Forward time: 4.92s Backward time: 3.43s Step time: 3.73s\n", - "440192 Examples seen. Accuracy:0.9063 Error: 0.28748 Loss:0.36401 Threads: 8 Forward time: 4.92s Backward time: 3.42s Step time: 3.71s\n", - "440832 Examples seen. Accuracy:0.9066 Error: 0.27006 Loss:0.27133 Threads: 8 Forward time: 4.98s Backward time: 3.44s Step time: 3.71s\n", - "441472 Examples seen. Accuracy:0.9075 Error: 0.23637 Loss:0.22124 Threads: 8 Forward time: 4.92s Backward time: 3.44s Step time: 3.73s\n", - "442112 Examples seen. Accuracy:0.9070 Error: 0.19347 Loss:0.18379 Threads: 8 Forward time: 4.93s Backward time: 3.41s Step time: 3.72s\n", - "442752 Examples seen. Accuracy:0.9046 Error: 0.29052 Loss:0.27703 Threads: 8 Forward time: 4.95s Backward time: 3.40s Step time: 3.72s\n", - "443392 Examples seen. Accuracy:0.9040 Error: 0.37173 Loss:0.53982 Threads: 8 Forward time: 4.93s Backward time: 3.43s Step time: 3.73s\n", - "444032 Examples seen. Accuracy:0.9023 Error: 0.43279 Loss:0.45847 Threads: 8 Forward time: 4.96s Backward time: 3.46s Step time: 3.73s\n", - "444672 Examples seen. Accuracy:0.9014 Error: 0.11264 Loss:0.10107 Threads: 8 Forward time: 4.93s Backward time: 3.38s Step time: 3.73s\n", - "445312 Examples seen. Accuracy:0.8993 Error: 0.36809 Loss:0.40034 Threads: 8 Forward time: 5.02s Backward time: 3.45s Step time: 3.78s\n", - "445952 Examples seen. Accuracy:0.9005 Error: 0.29819 Loss:0.40921 Threads: 8 Forward time: 4.98s Backward time: 3.39s Step time: 3.76s\n", - "446592 Examples seen. Accuracy:0.9026 Error: 0.32405 Loss:0.44635 Threads: 8 Forward time: 4.92s Backward time: 3.44s Step time: 3.75s\n", - "447232 Examples seen. Accuracy:0.9034 Error: 0.25668 Loss:0.27812 Threads: 8 Forward time: 4.93s Backward time: 3.39s Step time: 3.72s\n", - "447872 Examples seen. Accuracy:0.9027 Error: 0.32747 Loss:0.31855 Threads: 8 Forward time: 4.96s Backward time: 3.40s Step time: 3.72s\n", - "448512 Examples seen. 
Accuracy:0.9027 Error: 0.22717 Loss:0.28368 Threads: 8 Forward time: 4.96s Backward time: 3.43s Step time: 3.73s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 9 Examples seen:449136 Validation Accuracy: 0.9261 Validation Error: 0.2449 Validation Loss: 0.2281 Total time: 51.18min\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.1 hours.\n", - "Epochs: 9. Working time: 0.85 hours.\n", - "449776 Examples seen. Accuracy:0.8999 Error: 0.20598 Loss:0.19802 Threads: 8 Forward time: 4.95s Backward time: 3.42s Step time: 3.74s\n", - "450416 Examples seen. Accuracy:0.9011 Error: 0.29955 Loss:0.24860 Threads: 8 Forward time: 4.97s Backward time: 3.40s Step time: 3.72s\n", - "451056 Examples seen. Accuracy:0.9013 Error: 0.27500 Loss:0.32053 Threads: 8 Forward time: 4.98s Backward time: 3.38s Step time: 3.72s\n", - "451696 Examples seen. Accuracy:0.9010 Error: 0.31762 Loss:0.35769 Threads: 8 Forward time: 4.98s Backward time: 3.42s Step time: 3.77s\n", - "452336 Examples seen. Accuracy:0.9012 Error: 0.20646 Loss:0.19609 Threads: 8 Forward time: 4.98s Backward time: 3.38s Step time: 3.72s\n", - "452976 Examples seen. Accuracy:0.8994 Error: 0.30841 Loss:0.33099 Threads: 8 Forward time: 4.96s Backward time: 3.40s Step time: 3.73s\n", - "453616 Examples seen. Accuracy:0.8981 Error: 0.48714 Loss:0.50947 Threads: 8 Forward time: 5.02s Backward time: 3.40s Step time: 3.73s\n", - "454256 Examples seen. Accuracy:0.8983 Error: 0.27420 Loss:0.25333 Threads: 8 Forward time: 5.03s Backward time: 3.44s Step time: 3.75s\n", - "454896 Examples seen. Accuracy:0.9002 Error: 0.29410 Loss:0.31086 Threads: 8 Forward time: 5.23s Backward time: 3.62s Step time: 3.76s\n", - "455536 Examples seen. Accuracy:0.9000 Error: 0.36615 Loss:0.55404 Threads: 8 Forward time: 4.99s Backward time: 3.43s Step time: 4.30s\n", - "456176 Examples seen. 
Accuracy:0.8995 Error: 0.31797 Loss:0.34255 Threads: 8 Forward time: 4.95s Backward time: 3.38s Step time: 3.71s\n", - "456816 Examples seen. Accuracy:0.9013 Error: 0.13621 Loss:0.09656 Threads: 8 Forward time: 5.00s Backward time: 3.42s Step time: 3.71s\n", - "457456 Examples seen. Accuracy:0.8996 Error: 0.23699 Loss:0.28439 Threads: 8 Forward time: 4.96s Backward time: 3.40s Step time: 3.76s\n", - "458096 Examples seen. Accuracy:0.8997 Error: 0.26329 Loss:0.36544 Threads: 8 Forward time: 4.94s Backward time: 3.41s Step time: 3.79s\n", - "458736 Examples seen. Accuracy:0.9002 Error: 0.31634 Loss:0.39390 Threads: 8 Forward time: 4.92s Backward time: 3.40s Step time: 3.74s\n", - "459376 Examples seen. Accuracy:0.9019 Error: 0.20197 Loss:0.20080 Threads: 8 Forward time: 4.93s Backward time: 3.40s Step time: 3.72s\n", - "460016 Examples seen. Accuracy:0.9020 Error: 0.23047 Loss:0.17820 Threads: 8 Forward time: 5.00s Backward time: 3.39s Step time: 3.73s\n", - "460656 Examples seen. Accuracy:0.9019 Error: 0.19411 Loss:0.17867 Threads: 8 Forward time: 4.93s Backward time: 3.38s Step time: 3.73s\n", - "461296 Examples seen. Accuracy:0.9027 Error: 0.22714 Loss:0.19193 Threads: 8 Forward time: 4.92s Backward time: 3.43s Step time: 3.72s\n", - "461936 Examples seen. Accuracy:0.9042 Error: 0.19459 Loss:0.16151 Threads: 8 Forward time: 4.92s Backward time: 3.38s Step time: 3.75s\n", - "462576 Examples seen. Accuracy:0.9015 Error: 0.22002 Loss:0.18069 Threads: 8 Forward time: 4.93s Backward time: 3.41s Step time: 3.72s\n", - "463216 Examples seen. Accuracy:0.9030 Error: 0.18416 Loss:0.15353 Threads: 8 Forward time: 4.98s Backward time: 3.41s Step time: 3.72s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "463856 Examples seen. Accuracy:0.9045 Error: 0.11846 Loss:0.09183 Threads: 8 Forward time: 4.92s Backward time: 3.42s Step time: 3.72s\n", - "464496 Examples seen. 
Accuracy:0.9064 Error: 0.21130 Loss:0.20232 Threads: 8 Forward time: 4.94s Backward time: 3.41s Step time: 3.72s\n", - "465136 Examples seen. Accuracy:0.9036 Error: 0.34616 Loss:0.57110 Threads: 8 Forward time: 4.90s Backward time: 3.42s Step time: 3.70s\n", - "465776 Examples seen. Accuracy:0.9024 Error: 0.25141 Loss:0.26351 Threads: 8 Forward time: 4.91s Backward time: 3.43s Step time: 3.71s\n", - "466416 Examples seen. Accuracy:0.9017 Error: 0.29043 Loss:0.35970 Threads: 8 Forward time: 4.93s Backward time: 3.45s Step time: 3.69s\n", - "467056 Examples seen. Accuracy:0.9018 Error: 0.32809 Loss:0.44279 Threads: 8 Forward time: 4.95s Backward time: 3.39s Step time: 3.76s\n", - "467696 Examples seen. Accuracy:0.9012 Error: 0.32153 Loss:0.33375 Threads: 8 Forward time: 5.00s Backward time: 3.44s Step time: 3.71s\n", - "468336 Examples seen. Accuracy:0.9023 Error: 0.26314 Loss:0.24968 Threads: 8 Forward time: 4.97s Backward time: 3.44s Step time: 3.71s\n", - "468976 Examples seen. Accuracy:0.9046 Error: 0.21019 Loss:0.18661 Threads: 8 Forward time: 4.93s Backward time: 3.42s Step time: 3.70s\n", - "469616 Examples seen. Accuracy:0.9062 Error: 0.32064 Loss:0.37426 Threads: 8 Forward time: 4.94s Backward time: 3.42s Step time: 3.80s\n", - "470256 Examples seen. Accuracy:0.9068 Error: 0.34623 Loss:0.39703 Threads: 8 Forward time: 5.00s Backward time: 3.43s Step time: 3.70s\n", - "470896 Examples seen. Accuracy:0.9040 Error: 0.27478 Loss:0.26097 Threads: 8 Forward time: 4.92s Backward time: 3.40s Step time: 3.69s\n", - "471536 Examples seen. Accuracy:0.9036 Error: 0.21156 Loss:0.17101 Threads: 8 Forward time: 4.92s Backward time: 3.46s Step time: 3.69s\n", - "472176 Examples seen. Accuracy:0.9031 Error: 0.32779 Loss:0.34318 Threads: 8 Forward time: 4.96s Backward time: 3.41s Step time: 3.70s\n", - "472816 Examples seen. Accuracy:0.9039 Error: 0.18085 Loss:0.18187 Threads: 8 Forward time: 4.97s Backward time: 3.40s Step time: 3.70s\n", - "473456 Examples seen. 
Accuracy:0.9047 Error: 0.25392 Loss:0.22012 Threads: 8 Forward time: 4.95s Backward time: 3.39s Step time: 3.71s\n", - "474096 Examples seen. Accuracy:0.9061 Error: 0.19583 Loss:0.39767 Threads: 8 Forward time: 4.93s Backward time: 3.43s Step time: 3.70s\n", - "474736 Examples seen. Accuracy:0.9050 Error: 0.27020 Loss:0.24882 Threads: 8 Forward time: 4.95s Backward time: 3.44s Step time: 3.70s\n", - "475376 Examples seen. Accuracy:0.9039 Error: 0.25722 Loss:0.34675 Threads: 8 Forward time: 4.98s Backward time: 3.42s Step time: 3.71s\n", - "476016 Examples seen. Accuracy:0.9034 Error: 0.25629 Loss:0.20712 Threads: 8 Forward time: 4.99s Backward time: 3.43s Step time: 3.72s\n", - "476656 Examples seen. Accuracy:0.9038 Error: 0.27931 Loss:0.25116 Threads: 8 Forward time: 4.98s Backward time: 3.41s Step time: 3.74s\n", - "477296 Examples seen. Accuracy:0.9039 Error: 0.24468 Loss:0.28487 Threads: 8 Forward time: 4.94s Backward time: 3.40s Step time: 3.72s\n", - "477936 Examples seen. Accuracy:0.9054 Error: 0.20802 Loss:0.24282 Threads: 8 Forward time: 4.96s Backward time: 3.42s Step time: 3.70s\n", - "478576 Examples seen. Accuracy:0.9067 Error: 0.26210 Loss:0.26591 Threads: 8 Forward time: 4.98s Backward time: 3.42s Step time: 3.72s\n", - "479216 Examples seen. Accuracy:0.9079 Error: 0.20256 Loss:0.17632 Threads: 8 Forward time: 4.90s Backward time: 3.38s Step time: 3.70s\n", - "479856 Examples seen. Accuracy:0.9085 Error: 0.24697 Loss:0.20387 Threads: 8 Forward time: 4.92s Backward time: 3.42s Step time: 3.70s\n", - "480496 Examples seen. Accuracy:0.9078 Error: 0.28552 Loss:0.32312 Threads: 8 Forward time: 5.03s Backward time: 3.45s Step time: 3.74s\n", - "481136 Examples seen. Accuracy:0.9082 Error: 0.24127 Loss:0.29530 Threads: 8 Forward time: 4.98s Backward time: 3.43s Step time: 3.79s\n", - "481776 Examples seen. Accuracy:0.9097 Error: 0.22376 Loss:0.20318 Threads: 8 Forward time: 5.03s Backward time: 3.42s Step time: 3.73s\n", - "482416 Examples seen. 
Accuracy:0.9084 Error: 0.30161 Loss:0.32139 Threads: 8 Forward time: 5.02s Backward time: 3.42s Step time: 3.75s\n", - "483056 Examples seen. Accuracy:0.9080 Error: 0.25141 Loss:0.22090 Threads: 8 Forward time: 5.02s Backward time: 3.43s Step time: 3.74s\n", - "483696 Examples seen. Accuracy:0.9081 Error: 0.32059 Loss:0.43725 Threads: 8 Forward time: 5.01s Backward time: 3.44s Step time: 3.74s\n", - "484336 Examples seen. Accuracy:0.9090 Error: 0.15752 Loss:0.14132 Threads: 8 Forward time: 5.01s Backward time: 3.43s Step time: 4.28s\n", - "484976 Examples seen. Accuracy:0.9097 Error: 0.29585 Loss:0.39107 Threads: 8 Forward time: 4.95s Backward time: 3.42s Step time: 3.76s\n", - "485616 Examples seen. Accuracy:0.9101 Error: 0.22055 Loss:0.17601 Threads: 8 Forward time: 4.93s Backward time: 3.39s Step time: 3.72s\n", - "486256 Examples seen. Accuracy:0.9107 Error: 0.17638 Loss:0.11782 Threads: 8 Forward time: 4.93s Backward time: 3.39s Step time: 3.70s\n", - "486896 Examples seen. Accuracy:0.9099 Error: 0.40099 Loss:0.41963 Threads: 8 Forward time: 5.01s Backward time: 3.41s Step time: 3.76s\n", - "487536 Examples seen. Accuracy:0.9071 Error: 0.38620 Loss:0.47616 Threads: 8 Forward time: 5.00s Backward time: 3.39s Step time: 3.75s\n", - "488176 Examples seen. Accuracy:0.9080 Error: 0.26703 Loss:0.22957 Threads: 8 Forward time: 4.97s Backward time: 3.41s Step time: 3.76s\n", - "488816 Examples seen. Accuracy:0.9097 Error: 0.21145 Loss:0.31018 Threads: 8 Forward time: 4.93s Backward time: 3.41s Step time: 3.67s\n", - "489456 Examples seen. Accuracy:0.9100 Error: 0.28310 Loss:0.27703 Threads: 8 Forward time: 5.00s Backward time: 3.44s Step time: 3.78s\n", - "490096 Examples seen. Accuracy:0.9078 Error: 0.21226 Loss:0.27938 Threads: 8 Forward time: 4.96s Backward time: 3.41s Step time: 3.70s\n", - "490736 Examples seen. Accuracy:0.9087 Error: 0.20895 Loss:0.19127 Threads: 8 Forward time: 4.97s Backward time: 3.43s Step time: 3.69s\n", - "491376 Examples seen. 
Accuracy:0.9085 Error: 0.17187 Loss:0.16533 Threads: 8 Forward time: 4.97s Backward time: 3.39s Step time: 3.70s\n", - "492016 Examples seen. Accuracy:0.9087 Error: 0.32022 Loss:0.29375 Threads: 8 Forward time: 5.02s Backward time: 3.45s Step time: 3.71s\n", - "492656 Examples seen. Accuracy:0.9078 Error: 0.26783 Loss:0.30460 Threads: 8 Forward time: 5.00s Backward time: 3.43s Step time: 3.79s\n", - "493296 Examples seen. Accuracy:0.9065 Error: 0.26092 Loss:0.30560 Threads: 8 Forward time: 4.92s Backward time: 3.40s Step time: 3.70s\n", - "493936 Examples seen. Accuracy:0.9058 Error: 0.23102 Loss:0.22465 Threads: 8 Forward time: 4.91s Backward time: 3.39s Step time: 3.67s\n", - "494576 Examples seen. Accuracy:0.9058 Error: 0.37346 Loss:0.40639 Threads: 8 Forward time: 4.94s Backward time: 3.39s Step time: 3.68s\n", - "495216 Examples seen. Accuracy:0.9076 Error: 0.17668 Loss:0.18873 Threads: 8 Forward time: 4.90s Backward time: 3.37s Step time: 3.66s\n", - "495856 Examples seen. Accuracy:0.9081 Error: 0.26991 Loss:0.23883 Threads: 8 Forward time: 4.93s Backward time: 3.38s Step time: 3.65s\n", - "496496 Examples seen. Accuracy:0.9090 Error: 0.18075 Loss:0.15428 Threads: 8 Forward time: 4.97s Backward time: 3.38s Step time: 3.66s\n", - "497136 Examples seen. Accuracy:0.9078 Error: 0.27681 Loss:0.28423 Threads: 8 Forward time: 4.92s Backward time: 3.40s Step time: 3.66s\n", - "497776 Examples seen. Accuracy:0.9080 Error: 0.21698 Loss:0.25634 Threads: 8 Forward time: 4.91s Backward time: 3.37s Step time: 3.66s\n", - "498416 Examples seen. Accuracy:0.9084 Error: 0.20058 Loss:0.13238 Threads: 8 Forward time: 4.92s Backward time: 3.36s Step time: 3.65s\n", - "Starting Validation.\n", - "VALIDATION RECORD! 
Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 10 Examples seen:499040 Validation Accuracy: 0.9384 Validation Error: 0.1939 Validation Loss: 0.1803 Total time: 56.51min\n", - "Starting Testing.\n", - "Epochs: 10 Examples seen:499040 Test Accuracy: 0.9467 Test Error: 0.1922 Test Loss: 0.1646 Total time: 56.98min\n", - "Epoch time: 4.8 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 10. Working time: 0.95 hours.\n", - "Learning rate set to:0.00090\n", - "499680 Examples seen. Accuracy:0.9075 Error: 0.25707 Loss:0.21631 Threads: 8 Forward time: 4.92s Backward time: 3.36s Step time: 3.75s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "500320 Examples seen. Accuracy:0.9101 Error: 0.20803 Loss:0.16103 Threads: 8 Forward time: 5.03s Backward time: 3.39s Step time: 3.72s\n", - "500960 Examples seen. Accuracy:0.9105 Error: 0.16927 Loss:0.26840 Threads: 8 Forward time: 5.06s Backward time: 3.41s Step time: 3.74s\n", - "501600 Examples seen. Accuracy:0.9110 Error: 0.21942 Loss:0.24421 Threads: 8 Forward time: 5.02s Backward time: 3.38s Step time: 3.73s\n", - "502240 Examples seen. Accuracy:0.9120 Error: 0.21498 Loss:0.18714 Threads: 8 Forward time: 5.03s Backward time: 3.42s Step time: 3.75s\n", - "502880 Examples seen. Accuracy:0.9127 Error: 0.13512 Loss:0.09639 Threads: 8 Forward time: 5.00s Backward time: 3.45s Step time: 3.74s\n", - "503520 Examples seen. Accuracy:0.9126 Error: 0.28967 Loss:0.28949 Threads: 8 Forward time: 5.09s Backward time: 3.46s Step time: 3.80s\n", - "504160 Examples seen. Accuracy:0.9131 Error: 0.21126 Loss:0.20389 Threads: 8 Forward time: 6.99s Backward time: 5.28s Step time: 4.08s\n", - "504800 Examples seen. Accuracy:0.9142 Error: 0.24593 Loss:0.30065 Threads: 8 Forward time: 4.92s Backward time: 3.42s Step time: 3.97s\n", - "505440 Examples seen. Accuracy:0.9130 Error: 0.34897 Loss:0.36845 Threads: 8 Forward time: 4.97s Backward time: 3.41s Step time: 3.71s\n", - "506080 Examples seen. 
Accuracy:0.9130 Error: 0.32447 Loss:0.40064 Threads: 8 Forward time: 4.91s Backward time: 3.38s Step time: 3.71s\n", - "506720 Examples seen. Accuracy:0.9142 Error: 0.21962 Loss:0.22803 Threads: 8 Forward time: 4.97s Backward time: 3.40s Step time: 3.71s\n", - "507360 Examples seen. Accuracy:0.9139 Error: 0.27843 Loss:0.27879 Threads: 8 Forward time: 4.92s Backward time: 3.40s Step time: 3.69s\n", - "508000 Examples seen. Accuracy:0.9144 Error: 0.18976 Loss:0.17273 Threads: 8 Forward time: 4.99s Backward time: 3.42s Step time: 3.68s\n", - "508640 Examples seen. Accuracy:0.9145 Error: 0.18255 Loss:0.12871 Threads: 8 Forward time: 5.00s Backward time: 3.42s Step time: 3.68s\n", - "509280 Examples seen. Accuracy:0.9154 Error: 0.18851 Loss:0.22011 Threads: 8 Forward time: 4.96s Backward time: 3.42s Step time: 3.69s\n", - "509920 Examples seen. Accuracy:0.9142 Error: 0.37877 Loss:0.36610 Threads: 8 Forward time: 4.97s Backward time: 3.38s Step time: 3.69s\n", - "510560 Examples seen. Accuracy:0.9147 Error: 0.18671 Loss:0.17910 Threads: 8 Forward time: 4.93s Backward time: 3.42s Step time: 3.66s\n", - "511200 Examples seen. Accuracy:0.9148 Error: 0.13493 Loss:0.08595 Threads: 8 Forward time: 4.91s Backward time: 3.42s Step time: 3.66s\n", - "511840 Examples seen. Accuracy:0.9153 Error: 0.25926 Loss:0.30186 Threads: 8 Forward time: 4.97s Backward time: 3.41s Step time: 3.69s\n", - "512480 Examples seen. Accuracy:0.9162 Error: 0.05866 Loss:0.06469 Threads: 8 Forward time: 4.92s Backward time: 3.40s Step time: 3.68s\n", - "513120 Examples seen. Accuracy:0.9164 Error: 0.24818 Loss:0.27046 Threads: 8 Forward time: 4.91s Backward time: 3.36s Step time: 3.67s\n", - "513760 Examples seen. Accuracy:0.9156 Error: 0.20894 Loss:0.16550 Threads: 8 Forward time: 5.03s Backward time: 3.44s Step time: 3.72s\n", - "514400 Examples seen. Accuracy:0.9169 Error: 0.22496 Loss:0.25856 Threads: 8 Forward time: 4.98s Backward time: 3.41s Step time: 3.71s\n", - "515040 Examples seen. 
Accuracy:0.9152 Error: 0.25079 Loss:0.23001 Threads: 8 Forward time: 4.97s Backward time: 3.37s Step time: 3.71s\n", - "515680 Examples seen. Accuracy:0.9167 Error: 0.22300 Loss:0.16596 Threads: 8 Forward time: 4.94s Backward time: 3.40s Step time: 3.70s\n", - "516320 Examples seen. Accuracy:0.9162 Error: 0.35507 Loss:0.38129 Threads: 8 Forward time: 4.98s Backward time: 3.42s Step time: 3.73s\n", - "516960 Examples seen. Accuracy:0.9161 Error: 0.25773 Loss:0.30329 Threads: 8 Forward time: 4.96s Backward time: 3.39s Step time: 3.72s\n", - "517600 Examples seen. Accuracy:0.9169 Error: 0.23529 Loss:0.25212 Threads: 8 Forward time: 4.93s Backward time: 3.39s Step time: 3.72s\n", - "518240 Examples seen. Accuracy:0.9149 Error: 0.30573 Loss:0.39360 Threads: 8 Forward time: 4.99s Backward time: 3.44s Step time: 3.72s\n", - "518880 Examples seen. Accuracy:0.9156 Error: 0.20444 Loss:0.21559 Threads: 8 Forward time: 4.94s Backward time: 3.38s Step time: 3.73s\n", - "519520 Examples seen. Accuracy:0.9157 Error: 0.13176 Loss:0.12838 Threads: 8 Forward time: 4.99s Backward time: 3.41s Step time: 3.73s\n", - "520160 Examples seen. Accuracy:0.9163 Error: 0.27645 Loss:0.38159 Threads: 8 Forward time: 5.02s Backward time: 3.40s Step time: 3.71s\n", - "520800 Examples seen. Accuracy:0.9157 Error: 0.20431 Loss:0.23637 Threads: 8 Forward time: 4.99s Backward time: 3.40s Step time: 3.69s\n", - "521440 Examples seen. Accuracy:0.9172 Error: 0.33938 Loss:0.48311 Threads: 8 Forward time: 4.92s Backward time: 3.38s Step time: 3.69s\n", - "522080 Examples seen. Accuracy:0.9188 Error: 0.16453 Loss:0.12930 Threads: 8 Forward time: 4.91s Backward time: 3.39s Step time: 3.68s\n", - "522720 Examples seen. Accuracy:0.9196 Error: 0.11996 Loss:0.09880 Threads: 8 Forward time: 4.92s Backward time: 3.39s Step time: 3.67s\n", - "523360 Examples seen. Accuracy:0.9166 Error: 0.27744 Loss:0.37526 Threads: 8 Forward time: 4.98s Backward time: 3.38s Step time: 3.67s\n", - "524000 Examples seen. 
Accuracy:0.9155 Error: 0.32177 Loss:0.33154 Threads: 8 Forward time: 4.91s Backward time: 3.36s Step time: 3.66s\n", - "524640 Examples seen. Accuracy:0.9168 Error: 0.14842 Loss:0.11237 Threads: 8 Forward time: 5.07s Backward time: 3.45s Step time: 3.82s\n", - "525280 Examples seen. Accuracy:0.9172 Error: 0.16090 Loss:0.15605 Threads: 8 Forward time: 5.00s Backward time: 3.41s Step time: 3.75s\n", - "525920 Examples seen. Accuracy:0.9168 Error: 0.17786 Loss:0.12825 Threads: 8 Forward time: 5.05s Backward time: 3.42s Step time: 3.78s\n", - "526560 Examples seen. Accuracy:0.9158 Error: 0.24808 Loss:0.33885 Threads: 8 Forward time: 5.00s Backward time: 3.37s Step time: 3.78s\n", - "527200 Examples seen. Accuracy:0.9155 Error: 0.19414 Loss:0.15540 Threads: 8 Forward time: 4.96s Backward time: 3.40s Step time: 3.75s\n", - "527840 Examples seen. Accuracy:0.9150 Error: 0.20107 Loss:0.16529 Threads: 8 Forward time: 5.01s Backward time: 3.39s Step time: 3.76s\n", - "528480 Examples seen. Accuracy:0.9158 Error: 0.21912 Loss:0.17129 Threads: 8 Forward time: 4.97s Backward time: 3.41s Step time: 3.76s\n", - "529120 Examples seen. Accuracy:0.9155 Error: 0.30580 Loss:0.35358 Threads: 8 Forward time: 4.99s Backward time: 3.40s Step time: 3.74s\n", - "529760 Examples seen. Accuracy:0.9171 Error: 0.20621 Loss:0.17324 Threads: 8 Forward time: 4.95s Backward time: 3.39s Step time: 3.75s\n", - "530400 Examples seen. Accuracy:0.9169 Error: 0.39283 Loss:0.45150 Threads: 8 Forward time: 4.98s Backward time: 3.44s Step time: 3.76s\n", - "531040 Examples seen. Accuracy:0.9176 Error: 0.23696 Loss:0.26160 Threads: 8 Forward time: 5.05s Backward time: 3.44s Step time: 3.79s\n", - "531680 Examples seen. Accuracy:0.9176 Error: 0.21844 Loss:0.25647 Threads: 8 Forward time: 4.98s Backward time: 3.43s Step time: 3.77s\n", - "532320 Examples seen. Accuracy:0.9177 Error: 0.21973 Loss:0.28256 Threads: 8 Forward time: 5.01s Backward time: 3.40s Step time: 3.77s\n", - "532960 Examples seen. 
Accuracy:0.9183 Error: 0.24940 Loss:0.24267 Threads: 8 Forward time: 5.00s Backward time: 3.36s Step time: 3.77s\n", - "533600 Examples seen. Accuracy:0.9178 Error: 0.33766 Loss:0.38351 Threads: 8 Forward time: 5.09s Backward time: 3.40s Step time: 3.80s\n", - "534240 Examples seen. Accuracy:0.9176 Error: 0.26446 Loss:0.25359 Threads: 8 Forward time: 4.93s Backward time: 3.43s Step time: 4.32s\n", - "534880 Examples seen. Accuracy:0.9192 Error: 0.23802 Loss:0.27079 Threads: 8 Forward time: 4.96s Backward time: 3.38s Step time: 3.69s\n", - "535520 Examples seen. Accuracy:0.9190 Error: 0.28623 Loss:0.34778 Threads: 8 Forward time: 4.97s Backward time: 3.40s Step time: 3.68s\n", - "536160 Examples seen. Accuracy:0.9180 Error: 0.28343 Loss:0.30509 Threads: 8 Forward time: 4.90s Backward time: 3.39s Step time: 3.68s\n", - "536800 Examples seen. Accuracy:0.9182 Error: 0.27138 Loss:0.35933 Threads: 8 Forward time: 4.91s Backward time: 3.35s Step time: 3.65s\n", - "537440 Examples seen. Accuracy:0.9176 Error: 0.16258 Loss:0.15319 Threads: 8 Forward time: 4.90s Backward time: 3.36s Step time: 3.66s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "538080 Examples seen. Accuracy:0.9185 Error: 0.14722 Loss:0.13124 Threads: 8 Forward time: 4.91s Backward time: 3.36s Step time: 3.65s\n", - "538720 Examples seen. Accuracy:0.9195 Error: 0.35435 Loss:0.51405 Threads: 8 Forward time: 4.90s Backward time: 3.39s Step time: 3.65s\n", - "539360 Examples seen. Accuracy:0.9189 Error: 0.18575 Loss:0.15035 Threads: 8 Forward time: 4.97s Backward time: 3.39s Step time: 3.64s\n", - "540000 Examples seen. Accuracy:0.9187 Error: 0.27777 Loss:0.30759 Threads: 8 Forward time: 4.89s Backward time: 3.39s Step time: 3.66s\n", - "540640 Examples seen. Accuracy:0.9191 Error: 0.17585 Loss:0.15341 Threads: 8 Forward time: 4.92s Backward time: 3.38s Step time: 3.68s\n", - "541280 Examples seen. 
Accuracy:0.9190 Error: 0.23668 Loss:0.28170 Threads: 8 Forward time: 4.95s Backward time: 3.40s Step time: 3.69s\n", - "541920 Examples seen. Accuracy:0.9200 Error: 0.21885 Loss:0.32181 Threads: 8 Forward time: 4.90s Backward time: 3.41s Step time: 3.69s\n", - "542560 Examples seen. Accuracy:0.9198 Error: 0.21759 Loss:0.29533 Threads: 8 Forward time: 4.96s Backward time: 3.37s Step time: 3.70s\n", - "543200 Examples seen. Accuracy:0.9205 Error: 0.18567 Loss:0.17029 Threads: 8 Forward time: 5.00s Backward time: 3.42s Step time: 3.74s\n", - "543840 Examples seen. Accuracy:0.9186 Error: 0.30721 Loss:0.38167 Threads: 8 Forward time: 4.92s Backward time: 3.42s Step time: 3.76s\n", - "544480 Examples seen. Accuracy:0.9190 Error: 0.24838 Loss:0.23080 Threads: 8 Forward time: 4.98s Backward time: 3.37s Step time: 3.65s\n", - "545120 Examples seen. Accuracy:0.9193 Error: 0.14867 Loss:0.10943 Threads: 8 Forward time: 4.91s Backward time: 3.37s Step time: 3.67s\n", - "545760 Examples seen. Accuracy:0.9205 Error: 0.19442 Loss:0.33386 Threads: 8 Forward time: 4.94s Backward time: 3.39s Step time: 3.66s\n", - "546400 Examples seen. Accuracy:0.9197 Error: 0.21721 Loss:0.21738 Threads: 8 Forward time: 4.93s Backward time: 3.37s Step time: 3.65s\n", - "547040 Examples seen. Accuracy:0.9198 Error: 0.19133 Loss:0.15499 Threads: 8 Forward time: 4.94s Backward time: 3.40s Step time: 3.66s\n", - "547680 Examples seen. Accuracy:0.9186 Error: 0.34019 Loss:0.42148 Threads: 8 Forward time: 4.94s Backward time: 3.40s Step time: 3.75s\n", - "548320 Examples seen. Accuracy:0.9152 Error: 0.22790 Loss:0.23251 Threads: 8 Forward time: 4.91s Backward time: 3.39s Step time: 3.73s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 11 Examples seen:548944 Validation Accuracy: 0.9518 Validation Error: 0.1524 Validation Loss: 0.1459 Total time: 62.29min\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.1 hours.\n", - "Epochs: 11. 
Working time: 1.04 hours.\n", - "549584 Examples seen. Accuracy:0.9144 Error: 0.23092 Loss:0.36938 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.72s\n", - "550224 Examples seen. Accuracy:0.9128 Error: 0.26194 Loss:0.20998 Threads: 8 Forward time: 5.03s Backward time: 3.43s Step time: 3.70s\n", - "550864 Examples seen. Accuracy:0.9119 Error: 0.25531 Loss:0.36814 Threads: 8 Forward time: 5.03s Backward time: 3.40s Step time: 3.74s\n", - "551504 Examples seen. Accuracy:0.9126 Error: 0.18326 Loss:0.19310 Threads: 8 Forward time: 5.01s Backward time: 3.41s Step time: 3.73s\n", - "552144 Examples seen. Accuracy:0.9117 Error: 0.32824 Loss:0.36186 Threads: 8 Forward time: 5.11s Backward time: 3.45s Step time: 3.76s\n", - "552784 Examples seen. Accuracy:0.9117 Error: 0.25053 Loss:0.19268 Threads: 8 Forward time: 5.06s Backward time: 3.52s Step time: 3.78s\n", - "553424 Examples seen. Accuracy:0.9116 Error: 0.15198 Loss:0.15024 Threads: 8 Forward time: 5.01s Backward time: 3.39s Step time: 3.75s\n", - "554064 Examples seen. Accuracy:0.9123 Error: 0.23473 Loss:0.24098 Threads: 8 Forward time: 4.94s Backward time: 3.38s Step time: 3.73s\n", - "554704 Examples seen. Accuracy:0.9116 Error: 0.23039 Loss:0.32462 Threads: 8 Forward time: 4.95s Backward time: 3.38s Step time: 3.71s\n", - "555344 Examples seen. Accuracy:0.9129 Error: 0.20236 Loss:0.16263 Threads: 8 Forward time: 5.00s Backward time: 3.39s Step time: 3.72s\n", - "555984 Examples seen. Accuracy:0.9142 Error: 0.20465 Loss:0.21534 Threads: 8 Forward time: 4.99s Backward time: 3.39s Step time: 3.73s\n", - "556624 Examples seen. Accuracy:0.9136 Error: 0.26839 Loss:0.25619 Threads: 8 Forward time: 5.05s Backward time: 3.39s Step time: 3.73s\n", - "557264 Examples seen. Accuracy:0.9130 Error: 0.35622 Loss:0.40614 Threads: 8 Forward time: 5.03s Backward time: 3.38s Step time: 3.76s\n", - "557904 Examples seen. 
Accuracy:0.9126 Error: 0.23685 Loss:0.22980 Threads: 8 Forward time: 4.99s Backward time: 3.36s Step time: 3.72s\n", - "558544 Examples seen. Accuracy:0.9111 Error: 0.16157 Loss:0.11267 Threads: 8 Forward time: 5.05s Backward time: 3.38s Step time: 3.71s\n", - "559184 Examples seen. Accuracy:0.9116 Error: 0.28721 Loss:0.23702 Threads: 8 Forward time: 5.00s Backward time: 3.40s Step time: 4.46s\n", - "559824 Examples seen. Accuracy:0.9134 Error: 0.15638 Loss:0.11443 Threads: 8 Forward time: 4.97s Backward time: 3.38s Step time: 3.99s\n", - "560464 Examples seen. Accuracy:0.9139 Error: 0.20917 Loss:0.35016 Threads: 8 Forward time: 4.93s Backward time: 3.35s Step time: 3.86s\n", - "561104 Examples seen. Accuracy:0.9149 Error: 0.20351 Loss:0.14927 Threads: 8 Forward time: 5.00s Backward time: 3.40s Step time: 3.71s\n", - "561744 Examples seen. Accuracy:0.9146 Error: 0.31762 Loss:0.28682 Threads: 8 Forward time: 4.96s Backward time: 3.35s Step time: 3.65s\n", - "562384 Examples seen. Accuracy:0.9139 Error: 0.27747 Loss:0.36727 Threads: 8 Forward time: 4.90s Backward time: 3.36s Step time: 3.65s\n", - "563024 Examples seen. Accuracy:0.9138 Error: 0.21675 Loss:0.27570 Threads: 8 Forward time: 4.96s Backward time: 3.37s Step time: 3.67s\n", - "563664 Examples seen. Accuracy:0.9146 Error: 0.20100 Loss:0.19049 Threads: 8 Forward time: 4.92s Backward time: 3.35s Step time: 3.66s\n", - "564304 Examples seen. Accuracy:0.9150 Error: 0.22765 Loss:0.24695 Threads: 8 Forward time: 5.01s Backward time: 3.36s Step time: 3.67s\n", - "564944 Examples seen. Accuracy:0.9179 Error: 0.27237 Loss:0.26024 Threads: 8 Forward time: 4.93s Backward time: 3.37s Step time: 3.67s\n", - "565584 Examples seen. Accuracy:0.9172 Error: 0.25457 Loss:0.29427 Threads: 8 Forward time: 4.96s Backward time: 3.37s Step time: 3.66s\n", - "566224 Examples seen. Accuracy:0.9172 Error: 0.20643 Loss:0.17602 Threads: 8 Forward time: 4.91s Backward time: 3.36s Step time: 3.64s\n", - "566864 Examples seen. 
Accuracy:0.9160 Error: 0.15186 Loss:0.11778 Threads: 8 Forward time: 4.96s Backward time: 3.35s Step time: 3.68s\n", - "567504 Examples seen. Accuracy:0.9167 Error: 0.14150 Loss:0.28891 Threads: 8 Forward time: 4.92s Backward time: 3.34s Step time: 3.66s\n", - "568144 Examples seen. Accuracy:0.9153 Error: 0.20137 Loss:0.14648 Threads: 8 Forward time: 4.94s Backward time: 3.39s Step time: 3.68s\n", - "568784 Examples seen. Accuracy:0.9158 Error: 0.19455 Loss:0.15975 Threads: 8 Forward time: 4.93s Backward time: 3.37s Step time: 3.72s\n", - "569424 Examples seen. Accuracy:0.9149 Error: 0.32549 Loss:0.38222 Threads: 8 Forward time: 4.90s Backward time: 3.40s Step time: 3.66s\n", - "570064 Examples seen. Accuracy:0.9161 Error: 0.23548 Loss:0.22350 Threads: 8 Forward time: 4.94s Backward time: 3.42s Step time: 3.67s\n", - "570704 Examples seen. Accuracy:0.9177 Error: 0.16450 Loss:0.13579 Threads: 8 Forward time: 4.89s Backward time: 3.40s Step time: 3.68s\n", - "571344 Examples seen. Accuracy:0.9192 Error: 0.14622 Loss:0.11167 Threads: 8 Forward time: 4.90s Backward time: 3.38s Step time: 3.66s\n", - "571984 Examples seen. Accuracy:0.9217 Error: 0.25907 Loss:0.35041 Threads: 8 Forward time: 4.97s Backward time: 3.37s Step time: 3.68s\n", - "572624 Examples seen. Accuracy:0.9191 Error: 0.20682 Loss:0.21008 Threads: 8 Forward time: 4.94s Backward time: 3.43s Step time: 3.69s\n", - "573264 Examples seen. Accuracy:0.9191 Error: 0.15297 Loss:0.14154 Threads: 8 Forward time: 5.01s Backward time: 3.40s Step time: 3.71s\n", - "573904 Examples seen. Accuracy:0.9205 Error: 0.19884 Loss:0.16167 Threads: 8 Forward time: 4.94s Backward time: 3.38s Step time: 3.70s\n", - "574544 Examples seen. Accuracy:0.9207 Error: 0.24615 Loss:0.21966 Threads: 8 Forward time: 5.02s Backward time: 3.41s Step time: 3.69s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "575184 Examples seen. 
Accuracy:0.9192 Error: 0.40491 Loss:0.56969 Threads: 8 Forward time: 4.98s Backward time: 3.43s Step time: 3.72s\n", - "575824 Examples seen. Accuracy:0.9192 Error: 0.27584 Loss:0.24388 Threads: 8 Forward time: 4.89s Backward time: 3.39s Step time: 3.67s\n", - "576464 Examples seen. Accuracy:0.9193 Error: 0.20617 Loss:0.18758 Threads: 8 Forward time: 4.93s Backward time: 3.39s Step time: 3.67s\n", - "577104 Examples seen. Accuracy:0.9192 Error: 0.21042 Loss:0.17100 Threads: 8 Forward time: 4.94s Backward time: 3.39s Step time: 3.66s\n", - "577744 Examples seen. Accuracy:0.9173 Error: 0.27497 Loss:0.29829 Threads: 8 Forward time: 4.96s Backward time: 3.37s Step time: 3.66s\n", - "578384 Examples seen. Accuracy:0.9188 Error: 0.16427 Loss:0.12574 Threads: 8 Forward time: 4.93s Backward time: 3.35s Step time: 3.65s\n", - "579024 Examples seen. Accuracy:0.9191 Error: 0.15256 Loss:0.13585 Threads: 8 Forward time: 4.91s Backward time: 3.37s Step time: 3.64s\n", - "579664 Examples seen. Accuracy:0.9182 Error: 0.21550 Loss:0.28664 Threads: 8 Forward time: 5.01s Backward time: 3.37s Step time: 3.71s\n", - "580304 Examples seen. Accuracy:0.9192 Error: 0.23448 Loss:0.22764 Threads: 8 Forward time: 5.02s Backward time: 3.38s Step time: 3.77s\n", - "580944 Examples seen. Accuracy:0.9195 Error: 0.16520 Loss:0.19715 Threads: 8 Forward time: 5.03s Backward time: 3.40s Step time: 3.72s\n", - "581584 Examples seen. Accuracy:0.9188 Error: 0.23176 Loss:0.23918 Threads: 8 Forward time: 4.93s Backward time: 3.36s Step time: 3.71s\n", - "582224 Examples seen. Accuracy:0.9188 Error: 0.27296 Loss:0.27981 Threads: 8 Forward time: 4.94s Backward time: 3.37s Step time: 3.67s\n", - "582864 Examples seen. Accuracy:0.9182 Error: 0.18141 Loss:0.16519 Threads: 8 Forward time: 4.95s Backward time: 3.33s Step time: 3.69s\n", - "583504 Examples seen. Accuracy:0.9176 Error: 0.18723 Loss:0.19415 Threads: 8 Forward time: 4.95s Backward time: 3.36s Step time: 3.68s\n", - "584144 Examples seen. 
Accuracy:0.9172 Error: 0.24776 Loss:0.29961 Threads: 8 Forward time: 4.97s Backward time: 3.43s Step time: 3.69s\n", - "584784 Examples seen. Accuracy:0.9171 Error: 0.24251 Loss:0.23034 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.68s\n", - "585424 Examples seen. Accuracy:0.9177 Error: 0.13297 Loss:0.13619 Threads: 8 Forward time: 4.97s Backward time: 3.39s Step time: 3.70s\n", - "586064 Examples seen. Accuracy:0.9183 Error: 0.20108 Loss:0.25198 Threads: 8 Forward time: 4.97s Backward time: 3.40s Step time: 3.69s\n", - "586704 Examples seen. Accuracy:0.9183 Error: 0.24632 Loss:0.18730 Threads: 8 Forward time: 4.98s Backward time: 3.38s Step time: 3.68s\n", - "587344 Examples seen. Accuracy:0.9184 Error: 0.20645 Loss:0.23060 Threads: 8 Forward time: 4.99s Backward time: 3.36s Step time: 3.68s\n", - "587984 Examples seen. Accuracy:0.9182 Error: 0.10230 Loss:0.06580 Threads: 8 Forward time: 4.98s Backward time: 3.39s Step time: 3.69s\n", - "588624 Examples seen. Accuracy:0.9196 Error: 0.16517 Loss:0.17498 Threads: 8 Forward time: 5.00s Backward time: 3.38s Step time: 3.69s\n", - "589264 Examples seen. Accuracy:0.9203 Error: 0.16410 Loss:0.18330 Threads: 8 Forward time: 5.01s Backward time: 3.39s Step time: 4.39s\n", - "589904 Examples seen. Accuracy:0.9213 Error: 0.27152 Loss:0.27096 Threads: 8 Forward time: 4.97s Backward time: 3.37s Step time: 3.73s\n", - "590544 Examples seen. Accuracy:0.9222 Error: 0.12742 Loss:0.11996 Threads: 8 Forward time: 4.93s Backward time: 3.36s Step time: 3.74s\n", - "591184 Examples seen. Accuracy:0.9221 Error: 0.22901 Loss:0.21949 Threads: 8 Forward time: 4.94s Backward time: 3.37s Step time: 3.76s\n", - "591824 Examples seen. Accuracy:0.9219 Error: 0.14309 Loss:0.10733 Threads: 8 Forward time: 4.93s Backward time: 3.38s Step time: 3.76s\n", - "592464 Examples seen. Accuracy:0.9195 Error: 0.30420 Loss:0.28920 Threads: 8 Forward time: 4.95s Backward time: 3.35s Step time: 3.73s\n", - "593104 Examples seen. 
Accuracy:0.9203 Error: 0.17870 Loss:0.15471 Threads: 8 Forward time: 5.00s Backward time: 3.37s Step time: 3.76s\n", - "593744 Examples seen. Accuracy:0.9186 Error: 0.29441 Loss:0.43107 Threads: 8 Forward time: 4.93s Backward time: 3.35s Step time: 3.69s\n", - "594384 Examples seen. Accuracy:0.9176 Error: 0.26896 Loss:0.24342 Threads: 8 Forward time: 5.00s Backward time: 3.38s Step time: 3.71s\n", - "595024 Examples seen. Accuracy:0.9183 Error: 0.14405 Loss:0.12767 Threads: 8 Forward time: 4.91s Backward time: 3.34s Step time: 3.69s\n", - "595664 Examples seen. Accuracy:0.9189 Error: 0.23333 Loss:0.31450 Threads: 8 Forward time: 4.93s Backward time: 3.35s Step time: 3.68s\n", - "596304 Examples seen. Accuracy:0.9183 Error: 0.26393 Loss:0.32661 Threads: 8 Forward time: 4.91s Backward time: 3.36s Step time: 3.69s\n", - "596944 Examples seen. Accuracy:0.9191 Error: 0.19612 Loss:0.14478 Threads: 8 Forward time: 5.00s Backward time: 3.33s Step time: 3.69s\n", - "597584 Examples seen. Accuracy:0.9217 Error: 0.16814 Loss:0.16207 Threads: 8 Forward time: 4.99s Backward time: 3.40s Step time: 3.70s\n", - "598224 Examples seen. Accuracy:0.9232 Error: 0.30825 Loss:0.31310 Threads: 8 Forward time: 4.91s Backward time: 3.39s Step time: 3.70s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 12 Examples seen:598848 Validation Accuracy: 0.9594 Validation Error: 0.1270 Validation Loss: 0.1242 Total time: 67.61min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 12. Working time: 1.13 hours.\n", - "599488 Examples seen. Accuracy:0.9242 Error: 0.10997 Loss:0.09514 Threads: 8 Forward time: 4.94s Backward time: 3.37s Step time: 3.77s\n", - "600128 Examples seen. Accuracy:0.9227 Error: 0.32213 Loss:0.41548 Threads: 8 Forward time: 4.92s Backward time: 3.37s Step time: 3.71s\n", - "600768 Examples seen. 
Accuracy:0.9228 Error: 0.21764 Loss:0.22738 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.69s\n", - "601408 Examples seen. Accuracy:0.9243 Error: 0.25922 Loss:0.25940 Threads: 8 Forward time: 4.98s Backward time: 3.39s Step time: 3.71s\n", - "602048 Examples seen. Accuracy:0.9238 Error: 0.30416 Loss:0.32473 Threads: 8 Forward time: 4.96s Backward time: 3.37s Step time: 3.71s\n", - "602688 Examples seen. Accuracy:0.9245 Error: 0.24544 Loss:0.25958 Threads: 8 Forward time: 4.97s Backward time: 3.39s Step time: 3.75s\n", - "603328 Examples seen. Accuracy:0.9231 Error: 0.17887 Loss:0.20883 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.70s\n", - "603968 Examples seen. Accuracy:0.9252 Error: 0.13774 Loss:0.14632 Threads: 8 Forward time: 4.95s Backward time: 3.33s Step time: 3.69s\n", - "604608 Examples seen. Accuracy:0.9265 Error: 0.30910 Loss:0.27277 Threads: 8 Forward time: 4.89s Backward time: 3.33s Step time: 3.68s\n", - "605248 Examples seen. Accuracy:0.9282 Error: 0.14430 Loss:0.12108 Threads: 8 Forward time: 5.02s Backward time: 3.37s Step time: 3.70s\n", - "605888 Examples seen. Accuracy:0.9287 Error: 0.19664 Loss:0.23849 Threads: 8 Forward time: 5.01s Backward time: 3.40s Step time: 3.74s\n", - "606528 Examples seen. Accuracy:0.9280 Error: 0.16660 Loss:0.15280 Threads: 8 Forward time: 4.99s Backward time: 3.38s Step time: 3.72s\n", - "607168 Examples seen. Accuracy:0.9265 Error: 0.22760 Loss:0.23802 Threads: 8 Forward time: 4.96s Backward time: 3.37s Step time: 3.71s\n", - "607808 Examples seen. Accuracy:0.9254 Error: 0.25173 Loss:0.22797 Threads: 8 Forward time: 4.96s Backward time: 3.35s Step time: 3.71s\n", - "608448 Examples seen. Accuracy:0.9248 Error: 0.11139 Loss:0.07290 Threads: 8 Forward time: 4.97s Backward time: 3.36s Step time: 3.70s\n", - "609088 Examples seen. Accuracy:0.9246 Error: 0.27117 Loss:0.27656 Threads: 8 Forward time: 4.95s Backward time: 3.36s Step time: 3.70s\n", - "609728 Examples seen. 
Accuracy:0.9260 Error: 0.08587 Loss:0.05641 Threads: 8 Forward time: 4.98s Backward time: 3.36s Step time: 3.70s\n", - "610368 Examples seen. Accuracy:0.9270 Error: 0.24453 Loss:0.32479 Threads: 8 Forward time: 5.01s Backward time: 3.40s Step time: 3.73s\n", - "611008 Examples seen. Accuracy:0.9248 Error: 0.32945 Loss:0.47009 Threads: 8 Forward time: 5.01s Backward time: 3.39s Step time: 3.73s\n", - "611648 Examples seen. Accuracy:0.9256 Error: 0.22456 Loss:0.23669 Threads: 8 Forward time: 5.05s Backward time: 3.38s Step time: 3.76s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "612288 Examples seen. Accuracy:0.9245 Error: 0.24345 Loss:0.28888 Threads: 8 Forward time: 4.99s Backward time: 3.38s Step time: 3.74s\n", - "612928 Examples seen. Accuracy:0.9232 Error: 0.33060 Loss:0.44335 Threads: 8 Forward time: 5.06s Backward time: 3.40s Step time: 3.73s\n", - "613568 Examples seen. Accuracy:0.9238 Error: 0.18371 Loss:0.17017 Threads: 8 Forward time: 5.08s Backward time: 3.37s Step time: 3.73s\n", - "614208 Examples seen. Accuracy:0.9242 Error: 0.14690 Loss:0.11882 Threads: 8 Forward time: 5.00s Backward time: 3.36s Step time: 4.40s\n", - "614848 Examples seen. Accuracy:0.9242 Error: 0.21024 Loss:0.28923 Threads: 8 Forward time: 4.97s Backward time: 3.35s Step time: 3.70s\n", - "615488 Examples seen. Accuracy:0.9242 Error: 0.13054 Loss:0.11231 Threads: 8 Forward time: 5.01s Backward time: 3.38s Step time: 3.80s\n", - "616128 Examples seen. Accuracy:0.9258 Error: 0.21791 Loss:0.17394 Threads: 8 Forward time: 5.03s Backward time: 3.42s Step time: 3.79s\n", - "616768 Examples seen. Accuracy:0.9258 Error: 0.22210 Loss:0.26650 Threads: 8 Forward time: 5.05s Backward time: 3.38s Step time: 3.75s\n", - "617408 Examples seen. Accuracy:0.9242 Error: 0.17192 Loss:0.14012 Threads: 8 Forward time: 4.93s Backward time: 3.37s Step time: 3.74s\n", - "618048 Examples seen. 
Accuracy:0.9225 Error: 0.25511 Loss:0.29246 Threads: 8 Forward time: 5.02s Backward time: 3.39s Step time: 3.73s\n", - "618688 Examples seen. Accuracy:0.9211 Error: 0.29894 Loss:0.30944 Threads: 8 Forward time: 4.99s Backward time: 3.36s Step time: 3.72s\n", - "619328 Examples seen. Accuracy:0.9197 Error: 0.16383 Loss:0.15550 Threads: 8 Forward time: 4.98s Backward time: 3.36s Step time: 3.72s\n", - "619968 Examples seen. Accuracy:0.9218 Error: 0.19566 Loss:0.19699 Threads: 8 Forward time: 5.02s Backward time: 3.31s Step time: 3.73s\n", - "620608 Examples seen. Accuracy:0.9223 Error: 0.20551 Loss:0.26143 Threads: 8 Forward time: 5.02s Backward time: 3.38s Step time: 3.80s\n", - "621248 Examples seen. Accuracy:0.9236 Error: 0.20055 Loss:0.17781 Threads: 8 Forward time: 4.96s Backward time: 3.39s Step time: 3.78s\n", - "621888 Examples seen. Accuracy:0.9234 Error: 0.22115 Loss:0.19203 Threads: 8 Forward time: 4.96s Backward time: 3.35s Step time: 3.73s\n", - "622528 Examples seen. Accuracy:0.9242 Error: 0.11351 Loss:0.18863 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.72s\n", - "623168 Examples seen. Accuracy:0.9247 Error: 0.15576 Loss:0.11963 Threads: 8 Forward time: 4.99s Backward time: 3.31s Step time: 3.73s\n", - "623808 Examples seen. Accuracy:0.9238 Error: 0.19279 Loss:0.20185 Threads: 8 Forward time: 4.99s Backward time: 3.33s Step time: 3.73s\n", - "624448 Examples seen. Accuracy:0.9236 Error: 0.17985 Loss:0.14372 Threads: 8 Forward time: 4.95s Backward time: 3.35s Step time: 3.72s\n", - "625088 Examples seen. Accuracy:0.9241 Error: 0.22430 Loss:0.15522 Threads: 8 Forward time: 5.01s Backward time: 3.33s Step time: 3.76s\n", - "625728 Examples seen. Accuracy:0.9241 Error: 0.26919 Loss:0.29446 Threads: 8 Forward time: 4.93s Backward time: 3.34s Step time: 3.71s\n", - "626368 Examples seen. Accuracy:0.9237 Error: 0.12659 Loss:0.08955 Threads: 8 Forward time: 5.01s Backward time: 3.38s Step time: 3.72s\n", - "627008 Examples seen. 
Accuracy:0.9231 Error: 0.27613 Loss:0.36906 Threads: 8 Forward time: 4.94s Backward time: 3.34s Step time: 3.70s\n", - "627648 Examples seen. Accuracy:0.9245 Error: 0.17259 Loss:0.13135 Threads: 8 Forward time: 5.00s Backward time: 3.34s Step time: 3.69s\n", - "628288 Examples seen. Accuracy:0.9232 Error: 0.28986 Loss:0.45926 Threads: 8 Forward time: 4.92s Backward time: 3.32s Step time: 3.70s\n", - "628928 Examples seen. Accuracy:0.9246 Error: 0.21001 Loss:0.15484 Threads: 8 Forward time: 4.95s Backward time: 3.31s Step time: 3.67s\n", - "629568 Examples seen. Accuracy:0.9248 Error: 0.10011 Loss:0.10467 Threads: 8 Forward time: 4.94s Backward time: 3.35s Step time: 3.73s\n", - "630208 Examples seen. Accuracy:0.9253 Error: 0.26053 Loss:0.30016 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.77s\n", - "630848 Examples seen. Accuracy:0.9247 Error: 0.13562 Loss:0.09931 Threads: 8 Forward time: 5.02s Backward time: 3.33s Step time: 3.81s\n", - "631488 Examples seen. Accuracy:0.9234 Error: 0.29311 Loss:0.41956 Threads: 8 Forward time: 5.00s Backward time: 3.35s Step time: 3.77s\n", - "632128 Examples seen. Accuracy:0.9227 Error: 0.32604 Loss:0.32537 Threads: 8 Forward time: 4.98s Backward time: 3.32s Step time: 3.68s\n", - "632768 Examples seen. Accuracy:0.9234 Error: 0.21469 Loss:0.17809 Threads: 8 Forward time: 4.94s Backward time: 3.34s Step time: 3.71s\n", - "633408 Examples seen. Accuracy:0.9238 Error: 0.19012 Loss:0.20322 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.68s\n", - "634048 Examples seen. Accuracy:0.9231 Error: 0.23476 Loss:0.24449 Threads: 8 Forward time: 5.02s Backward time: 3.39s Step time: 3.73s\n", - "634688 Examples seen. Accuracy:0.9230 Error: 0.24315 Loss:0.22827 Threads: 8 Forward time: 5.03s Backward time: 3.34s Step time: 3.70s\n", - "635328 Examples seen. Accuracy:0.9228 Error: 0.17972 Loss:0.12513 Threads: 8 Forward time: 4.98s Backward time: 3.34s Step time: 3.73s\n", - "635968 Examples seen. 
Accuracy:0.9235 Error: 0.24447 Loss:0.23741 Threads: 8 Forward time: 4.97s Backward time: 3.33s Step time: 3.68s\n", - "636608 Examples seen. Accuracy:0.9243 Error: 0.17920 Loss:0.14483 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.74s\n", - "637248 Examples seen. Accuracy:0.9234 Error: 0.17000 Loss:0.12153 Threads: 8 Forward time: 4.98s Backward time: 3.32s Step time: 3.72s\n", - "637888 Examples seen. Accuracy:0.9207 Error: 0.25757 Loss:0.25668 Threads: 8 Forward time: 5.03s Backward time: 3.32s Step time: 3.81s\n", - "638528 Examples seen. Accuracy:0.9202 Error: 0.24584 Loss:0.42478 Threads: 8 Forward time: 4.99s Backward time: 3.32s Step time: 3.71s\n", - "639168 Examples seen. Accuracy:0.9214 Error: 0.23140 Loss:0.20725 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.70s\n", - "639808 Examples seen. Accuracy:0.9216 Error: 0.21041 Loss:0.24327 Threads: 8 Forward time: 5.00s Backward time: 3.34s Step time: 3.68s\n", - "640448 Examples seen. Accuracy:0.9233 Error: 0.20852 Loss:0.23347 Threads: 8 Forward time: 4.97s Backward time: 3.35s Step time: 3.69s\n", - "641088 Examples seen. Accuracy:0.9224 Error: 0.28905 Loss:0.25250 Threads: 8 Forward time: 4.99s Backward time: 3.34s Step time: 3.71s\n", - "641728 Examples seen. Accuracy:0.9209 Error: 0.24816 Loss:0.28803 Threads: 8 Forward time: 4.98s Backward time: 3.35s Step time: 3.73s\n", - "642368 Examples seen. Accuracy:0.9201 Error: 0.27818 Loss:0.25208 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.71s\n", - "643008 Examples seen. Accuracy:0.9187 Error: 0.29237 Loss:0.25005 Threads: 8 Forward time: 5.21s Backward time: 3.44s Step time: 3.72s\n", - "643648 Examples seen. Accuracy:0.9206 Error: 0.24957 Loss:0.23271 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 4.29s\n", - "644288 Examples seen. Accuracy:0.9192 Error: 0.31494 Loss:0.37084 Threads: 8 Forward time: 4.93s Backward time: 3.30s Step time: 3.69s\n", - "644928 Examples seen. 
Accuracy:0.9205 Error: 0.18692 Loss:0.13481 Threads: 8 Forward time: 4.99s Backward time: 3.34s Step time: 3.70s\n", - "645568 Examples seen. Accuracy:0.9198 Error: 0.28008 Loss:0.31609 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.74s\n", - "646208 Examples seen. Accuracy:0.9192 Error: 0.27794 Loss:0.32324 Threads: 8 Forward time: 4.98s Backward time: 3.36s Step time: 3.72s\n", - "646848 Examples seen. Accuracy:0.9176 Error: 0.25645 Loss:0.19357 Threads: 8 Forward time: 4.93s Backward time: 3.35s Step time: 3.69s\n", - "647488 Examples seen. Accuracy:0.9188 Error: 0.18331 Loss:0.19487 Threads: 8 Forward time: 4.91s Backward time: 3.32s Step time: 3.67s\n", - "648128 Examples seen. Accuracy:0.9190 Error: 0.14423 Loss:0.32105 Threads: 8 Forward time: 4.91s Backward time: 3.33s Step time: 3.68s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 13 Examples seen:648752 Validation Accuracy: 0.9612 Validation Error: 0.1139 Validation Loss: 0.1133 Total time: 72.94min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 13. Working time: 1.22 hours.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "649392 Examples seen. Accuracy:0.9175 Error: 0.26981 Loss:0.25934 Threads: 8 Forward time: 4.96s Backward time: 3.37s Step time: 3.76s\n", - "650032 Examples seen. Accuracy:0.9172 Error: 0.18490 Loss:0.21110 Threads: 8 Forward time: 4.92s Backward time: 3.33s Step time: 3.78s\n", - "650672 Examples seen. Accuracy:0.9186 Error: 0.17890 Loss:0.21293 Threads: 8 Forward time: 4.96s Backward time: 3.35s Step time: 3.69s\n", - "651312 Examples seen. Accuracy:0.9188 Error: 0.35319 Loss:0.40238 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.73s\n", - "651952 Examples seen. Accuracy:0.9205 Error: 0.22089 Loss:0.26061 Threads: 8 Forward time: 5.01s Backward time: 3.31s Step time: 3.72s\n", - "652592 Examples seen. 
Accuracy:0.9211 Error: 0.15760 Loss:0.12604 Threads: 8 Forward time: 5.04s Backward time: 3.34s Step time: 3.73s\n", - "653232 Examples seen. Accuracy:0.9209 Error: 0.27305 Loss:0.43929 Threads: 8 Forward time: 4.95s Backward time: 3.34s Step time: 3.71s\n", - "653872 Examples seen. Accuracy:0.9202 Error: 0.29223 Loss:0.26970 Threads: 8 Forward time: 4.95s Backward time: 3.33s Step time: 3.99s\n", - "654512 Examples seen. Accuracy:0.9196 Error: 0.29819 Loss:0.37527 Threads: 8 Forward time: 4.98s Backward time: 3.33s Step time: 4.01s\n", - "655152 Examples seen. Accuracy:0.9193 Error: 0.19897 Loss:0.19135 Threads: 8 Forward time: 4.92s Backward time: 3.31s Step time: 4.17s\n", - "655792 Examples seen. Accuracy:0.9210 Error: 0.14533 Loss:0.10307 Threads: 8 Forward time: 4.97s Backward time: 3.33s Step time: 3.71s\n", - "656432 Examples seen. Accuracy:0.9221 Error: 0.19749 Loss:0.15923 Threads: 8 Forward time: 4.96s Backward time: 3.33s Step time: 3.72s\n", - "657072 Examples seen. Accuracy:0.9224 Error: 0.24243 Loss:0.29140 Threads: 8 Forward time: 4.93s Backward time: 3.34s Step time: 3.67s\n", - "657712 Examples seen. Accuracy:0.9245 Error: 0.15944 Loss:0.14549 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.66s\n", - "658352 Examples seen. Accuracy:0.9241 Error: 0.27938 Loss:0.34723 Threads: 8 Forward time: 4.93s Backward time: 3.31s Step time: 3.66s\n", - "658992 Examples seen. Accuracy:0.9238 Error: 0.21748 Loss:0.16661 Threads: 8 Forward time: 4.95s Backward time: 3.34s Step time: 3.68s\n", - "659632 Examples seen. Accuracy:0.9251 Error: 0.27197 Loss:0.31927 Threads: 8 Forward time: 5.04s Backward time: 3.36s Step time: 3.69s\n", - "660272 Examples seen. Accuracy:0.9233 Error: 0.31591 Loss:0.44622 Threads: 8 Forward time: 5.03s Backward time: 3.33s Step time: 3.73s\n", - "660912 Examples seen. Accuracy:0.9225 Error: 0.27785 Loss:0.32595 Threads: 8 Forward time: 4.96s Backward time: 3.33s Step time: 3.71s\n", - "661552 Examples seen. 
Accuracy:0.9228 Error: 0.17853 Loss:0.20782 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.73s\n", - "662192 Examples seen. Accuracy:0.9245 Error: 0.24496 Loss:0.24084 Threads: 8 Forward time: 4.95s Backward time: 3.31s Step time: 3.66s\n", - "662832 Examples seen. Accuracy:0.9246 Error: 0.23309 Loss:0.17473 Threads: 8 Forward time: 4.99s Backward time: 3.35s Step time: 3.66s\n", - "663472 Examples seen. Accuracy:0.9258 Error: 0.16811 Loss:0.13569 Threads: 8 Forward time: 5.09s Backward time: 3.36s Step time: 3.66s\n", - "664112 Examples seen. Accuracy:0.9248 Error: 0.28688 Loss:0.42061 Threads: 8 Forward time: 5.05s Backward time: 3.35s Step time: 3.72s\n", - "664752 Examples seen. Accuracy:0.9239 Error: 0.26790 Loss:0.27782 Threads: 8 Forward time: 4.98s Backward time: 3.32s Step time: 3.67s\n", - "665392 Examples seen. Accuracy:0.9231 Error: 0.28737 Loss:0.30998 Threads: 8 Forward time: 4.98s Backward time: 3.34s Step time: 3.66s\n", - "666032 Examples seen. Accuracy:0.9251 Error: 0.19994 Loss:0.14584 Threads: 8 Forward time: 4.99s Backward time: 3.36s Step time: 3.69s\n", - "666672 Examples seen. Accuracy:0.9242 Error: 0.15484 Loss:0.17790 Threads: 8 Forward time: 5.02s Backward time: 3.35s Step time: 3.70s\n", - "667312 Examples seen. Accuracy:0.9230 Error: 0.22248 Loss:0.26383 Threads: 8 Forward time: 5.03s Backward time: 3.33s Step time: 3.69s\n", - "667952 Examples seen. Accuracy:0.9235 Error: 0.17661 Loss:0.15604 Threads: 8 Forward time: 5.01s Backward time: 3.32s Step time: 3.69s\n", - "668592 Examples seen. Accuracy:0.9228 Error: 0.22582 Loss:0.19423 Threads: 8 Forward time: 5.00s Backward time: 3.34s Step time: 4.29s\n", - "669232 Examples seen. Accuracy:0.9230 Error: 0.19354 Loss:0.18550 Threads: 8 Forward time: 5.03s Backward time: 3.32s Step time: 3.74s\n", - "669872 Examples seen. Accuracy:0.9235 Error: 0.15994 Loss:0.11418 Threads: 8 Forward time: 4.93s Backward time: 3.34s Step time: 3.73s\n", - "670512 Examples seen. 
Accuracy:0.9242 Error: 0.21230 Loss:0.20893 Threads: 8 Forward time: 5.00s Backward time: 3.39s Step time: 3.72s\n", - "671152 Examples seen. Accuracy:0.9247 Error: 0.17924 Loss:0.18412 Threads: 8 Forward time: 4.93s Backward time: 3.36s Step time: 3.69s\n", - "671792 Examples seen. Accuracy:0.9246 Error: 0.30468 Loss:0.32864 Threads: 8 Forward time: 4.92s Backward time: 3.34s Step time: 3.74s\n", - "672432 Examples seen. Accuracy:0.9233 Error: 0.21060 Loss:0.21091 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.67s\n", - "673072 Examples seen. Accuracy:0.9224 Error: 0.19358 Loss:0.21268 Threads: 8 Forward time: 4.97s Backward time: 3.32s Step time: 3.68s\n", - "673712 Examples seen. Accuracy:0.9223 Error: 0.31135 Loss:0.29611 Threads: 8 Forward time: 4.95s Backward time: 3.31s Step time: 3.68s\n", - "674352 Examples seen. Accuracy:0.9224 Error: 0.23413 Loss:0.25672 Threads: 8 Forward time: 4.95s Backward time: 3.34s Step time: 3.67s\n", - "674992 Examples seen. Accuracy:0.9249 Error: 0.18648 Loss:0.16315 Threads: 8 Forward time: 5.01s Backward time: 3.35s Step time: 3.73s\n", - "675632 Examples seen. Accuracy:0.9245 Error: 0.21367 Loss:0.20568 Threads: 8 Forward time: 4.96s Backward time: 3.36s Step time: 4.01s\n", - "676272 Examples seen. Accuracy:0.9255 Error: 0.15781 Loss:0.11221 Threads: 8 Forward time: 4.97s Backward time: 3.37s Step time: 3.72s\n", - "676912 Examples seen. Accuracy:0.9257 Error: 0.26989 Loss:0.30329 Threads: 8 Forward time: 4.96s Backward time: 3.33s Step time: 3.69s\n", - "677552 Examples seen. Accuracy:0.9249 Error: 0.24495 Loss:0.32354 Threads: 8 Forward time: 4.94s Backward time: 3.34s Step time: 3.69s\n", - "678192 Examples seen. Accuracy:0.9253 Error: 0.20756 Loss:0.21230 Threads: 8 Forward time: 4.94s Backward time: 3.37s Step time: 3.68s\n", - "678832 Examples seen. Accuracy:0.9262 Error: 0.15825 Loss:0.15859 Threads: 8 Forward time: 4.98s Backward time: 3.37s Step time: 3.69s\n", - "679472 Examples seen. 
Accuracy:0.9260 Error: 0.23991 Loss:0.22751 Threads: 8 Forward time: 4.90s Backward time: 3.36s Step time: 3.68s\n", - "680112 Examples seen. Accuracy:0.9262 Error: 0.17398 Loss:0.14430 Threads: 8 Forward time: 4.99s Backward time: 3.36s Step time: 3.68s\n", - "680752 Examples seen. Accuracy:0.9251 Error: 0.27289 Loss:0.30962 Threads: 8 Forward time: 5.00s Backward time: 3.35s Step time: 3.69s\n", - "681392 Examples seen. Accuracy:0.9256 Error: 0.18921 Loss:0.24007 Threads: 8 Forward time: 4.93s Backward time: 3.33s Step time: 3.69s\n", - "682032 Examples seen. Accuracy:0.9251 Error: 0.17527 Loss:0.15538 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.74s\n", - "682672 Examples seen. Accuracy:0.9246 Error: 0.21379 Loss:0.34513 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.68s\n", - "683312 Examples seen. Accuracy:0.9218 Error: 0.19323 Loss:0.14895 Threads: 8 Forward time: 5.00s Backward time: 3.36s Step time: 3.69s\n", - "683952 Examples seen. Accuracy:0.9201 Error: 0.13746 Loss:0.16388 Threads: 8 Forward time: 4.95s Backward time: 3.34s Step time: 3.69s\n", - "684592 Examples seen. Accuracy:0.9201 Error: 0.14745 Loss:0.11056 Threads: 8 Forward time: 4.94s Backward time: 3.33s Step time: 3.69s\n", - "685232 Examples seen. Accuracy:0.9212 Error: 0.09946 Loss:0.07089 Threads: 8 Forward time: 4.94s Backward time: 3.32s Step time: 3.69s\n", - "685872 Examples seen. Accuracy:0.9194 Error: 0.15444 Loss:0.18586 Threads: 8 Forward time: 5.02s Backward time: 3.36s Step time: 3.77s\n", - "686512 Examples seen. Accuracy:0.9198 Error: 0.22131 Loss:0.22950 Threads: 8 Forward time: 4.99s Backward time: 3.35s Step time: 3.77s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "687152 Examples seen. Accuracy:0.9197 Error: 0.20431 Loss:0.20188 Threads: 8 Forward time: 5.01s Backward time: 3.33s Step time: 3.74s\n", - "687792 Examples seen. 
Accuracy:0.9197 Error: 0.20788 Loss:0.18897 Threads: 8 Forward time: 4.99s Backward time: 3.36s Step time: 3.80s\n", - "688432 Examples seen. Accuracy:0.9209 Error: 0.18472 Loss:0.17458 Threads: 8 Forward time: 5.01s Backward time: 3.33s Step time: 3.74s\n", - "689072 Examples seen. Accuracy:0.9209 Error: 0.19537 Loss:0.20380 Threads: 8 Forward time: 5.04s Backward time: 3.38s Step time: 3.73s\n", - "689712 Examples seen. Accuracy:0.9186 Error: 0.27032 Loss:0.33264 Threads: 8 Forward time: 5.02s Backward time: 3.39s Step time: 3.77s\n", - "690352 Examples seen. Accuracy:0.9178 Error: 0.28204 Loss:0.29697 Threads: 8 Forward time: 5.00s Backward time: 3.39s Step time: 3.77s\n", - "690992 Examples seen. Accuracy:0.9181 Error: 0.20187 Loss:0.18912 Threads: 8 Forward time: 5.02s Backward time: 3.38s Step time: 3.74s\n", - "691632 Examples seen. Accuracy:0.9179 Error: 0.18593 Loss:0.18831 Threads: 8 Forward time: 4.99s Backward time: 3.33s Step time: 3.74s\n", - "692272 Examples seen. Accuracy:0.9188 Error: 0.20844 Loss:0.20645 Threads: 8 Forward time: 4.97s Backward time: 3.38s Step time: 3.74s\n", - "692912 Examples seen. Accuracy:0.9194 Error: 0.22447 Loss:0.23672 Threads: 8 Forward time: 4.98s Backward time: 3.37s Step time: 3.73s\n", - "693552 Examples seen. Accuracy:0.9203 Error: 0.22415 Loss:0.20642 Threads: 8 Forward time: 4.98s Backward time: 3.35s Step time: 3.80s\n", - "694192 Examples seen. Accuracy:0.9202 Error: 0.22189 Loss:0.19755 Threads: 8 Forward time: 5.05s Backward time: 3.36s Step time: 3.78s\n", - "694832 Examples seen. Accuracy:0.9209 Error: 0.10455 Loss:0.11997 Threads: 8 Forward time: 5.03s Backward time: 3.35s Step time: 3.76s\n", - "695472 Examples seen. Accuracy:0.9218 Error: 0.09665 Loss:0.07078 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.78s\n", - "696112 Examples seen. Accuracy:0.9229 Error: 0.18862 Loss:0.15474 Threads: 8 Forward time: 5.07s Backward time: 3.35s Step time: 3.76s\n", - "696752 Examples seen. 
Accuracy:0.9232 Error: 0.17772 Loss:0.14124 Threads: 8 Forward time: 5.03s Backward time: 3.33s Step time: 3.77s\n", - "697392 Examples seen. Accuracy:0.9257 Error: 0.23398 Loss:0.21990 Threads: 8 Forward time: 5.05s Backward time: 3.37s Step time: 3.80s\n", - "698032 Examples seen. Accuracy:0.9250 Error: 0.29578 Loss:0.31758 Threads: 8 Forward time: 5.04s Backward time: 3.35s Step time: 4.42s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 14 Examples seen:698656 Validation Accuracy: 0.9634 Validation Error: 0.1034 Validation Loss: 0.1017 Total time: 78.30min\n", - "Epoch time: 5.8 minutes. 100 epochs: 9.6 hours.\n", - "Epochs: 14. Working time: 1.3 hours.\n", - "699296 Examples seen. Accuracy:0.9235 Error: 0.22145 Loss:0.26296 Threads: 8 Forward time: 5.05s Backward time: 3.36s Step time: 3.76s\n", - "699936 Examples seen. Accuracy:0.9252 Error: 0.14014 Loss:0.09814 Threads: 8 Forward time: 5.01s Backward time: 3.39s Step time: 3.77s\n", - "700576 Examples seen. Accuracy:0.9256 Error: 0.30009 Loss:0.47461 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.76s\n", - "701216 Examples seen. Accuracy:0.9230 Error: 0.17862 Loss:0.15909 Threads: 8 Forward time: 4.94s Backward time: 3.33s Step time: 3.73s\n", - "701856 Examples seen. Accuracy:0.9235 Error: 0.25658 Loss:0.28110 Threads: 8 Forward time: 5.02s Backward time: 3.36s Step time: 3.74s\n", - "702496 Examples seen. Accuracy:0.9246 Error: 0.16752 Loss:0.24733 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.72s\n", - "703136 Examples seen. Accuracy:0.9254 Error: 0.23252 Loss:0.30819 Threads: 8 Forward time: 4.92s Backward time: 3.30s Step time: 3.71s\n", - "703776 Examples seen. Accuracy:0.9254 Error: 0.23237 Loss:0.21500 Threads: 8 Forward time: 4.99s Backward time: 3.32s Step time: 3.71s\n", - "704416 Examples seen. 
Accuracy:0.9260 Error: 0.20399 Loss:0.17635 Threads: 8 Forward time: 4.96s Backward time: 3.33s Step time: 3.75s\n", - "705056 Examples seen. Accuracy:0.9262 Error: 0.19453 Loss:0.22488 Threads: 8 Forward time: 5.00s Backward time: 3.33s Step time: 3.74s\n", - "705696 Examples seen. Accuracy:0.9277 Error: 0.14425 Loss:0.09093 Threads: 8 Forward time: 4.98s Backward time: 3.33s Step time: 3.74s\n", - "706336 Examples seen. Accuracy:0.9288 Error: 0.31189 Loss:0.32594 Threads: 8 Forward time: 4.94s Backward time: 3.31s Step time: 3.72s\n", - "706976 Examples seen. Accuracy:0.9285 Error: 0.13149 Loss:0.09739 Threads: 8 Forward time: 4.94s Backward time: 3.31s Step time: 3.70s\n", - "707616 Examples seen. Accuracy:0.9284 Error: 0.14378 Loss:0.21577 Threads: 8 Forward time: 4.92s Backward time: 3.35s Step time: 3.69s\n", - "708256 Examples seen. Accuracy:0.9282 Error: 0.17707 Loss:0.11789 Threads: 8 Forward time: 4.92s Backward time: 3.31s Step time: 3.70s\n", - "708896 Examples seen. Accuracy:0.9289 Error: 0.20129 Loss:0.22809 Threads: 8 Forward time: 4.98s Backward time: 3.35s Step time: 3.67s\n", - "709536 Examples seen. Accuracy:0.9288 Error: 0.12164 Loss:0.08824 Threads: 8 Forward time: 4.94s Backward time: 3.35s Step time: 3.67s\n", - "710176 Examples seen. Accuracy:0.9297 Error: 0.24316 Loss:0.26564 Threads: 8 Forward time: 4.93s Backward time: 3.36s Step time: 3.68s\n", - "710816 Examples seen. Accuracy:0.9283 Error: 0.20906 Loss:0.17778 Threads: 8 Forward time: 5.00s Backward time: 3.37s Step time: 3.71s\n", - "711456 Examples seen. Accuracy:0.9291 Error: 0.18636 Loss:0.14885 Threads: 8 Forward time: 5.00s Backward time: 3.38s Step time: 3.73s\n", - "712096 Examples seen. Accuracy:0.9277 Error: 0.14190 Loss:0.16963 Threads: 8 Forward time: 4.99s Backward time: 3.34s Step time: 3.75s\n", - "712736 Examples seen. Accuracy:0.9278 Error: 0.14451 Loss:0.15218 Threads: 8 Forward time: 5.01s Backward time: 3.38s Step time: 3.73s\n", - "713376 Examples seen. 
Accuracy:0.9261 Error: 0.16294 Loss:0.17784 Threads: 8 Forward time: 4.93s Backward time: 3.33s Step time: 3.71s\n", - "714016 Examples seen. Accuracy:0.9250 Error: 0.23963 Loss:0.24820 Threads: 8 Forward time: 4.92s Backward time: 3.31s Step time: 3.67s\n", - "714656 Examples seen. Accuracy:0.9250 Error: 0.11989 Loss:0.08585 Threads: 8 Forward time: 4.90s Backward time: 3.31s Step time: 3.68s\n", - "715296 Examples seen. Accuracy:0.9252 Error: 0.12969 Loss:0.12236 Threads: 8 Forward time: 4.96s Backward time: 3.34s Step time: 3.67s\n", - "715936 Examples seen. Accuracy:0.9257 Error: 0.12528 Loss:0.11375 Threads: 8 Forward time: 4.92s Backward time: 3.33s Step time: 3.67s\n", - "716576 Examples seen. Accuracy:0.9253 Error: 0.18179 Loss:0.36416 Threads: 8 Forward time: 4.98s Backward time: 3.33s Step time: 3.68s\n", - "717216 Examples seen. Accuracy:0.9264 Error: 0.21959 Loss:0.21867 Threads: 8 Forward time: 5.09s Backward time: 3.38s Step time: 3.70s\n", - "717856 Examples seen. Accuracy:0.9278 Error: 0.18928 Loss:0.13191 Threads: 8 Forward time: 4.98s Backward time: 3.33s Step time: 3.71s\n", - "718496 Examples seen. Accuracy:0.9263 Error: 0.22639 Loss:0.24938 Threads: 8 Forward time: 5.01s Backward time: 3.33s Step time: 3.69s\n", - "719136 Examples seen. Accuracy:0.9265 Error: 0.28791 Loss:0.29771 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.69s\n", - "719776 Examples seen. Accuracy:0.9274 Error: 0.24713 Loss:0.32485 Threads: 8 Forward time: 5.00s Backward time: 3.36s Step time: 3.70s\n", - "720416 Examples seen. Accuracy:0.9276 Error: 0.18207 Loss:0.17420 Threads: 8 Forward time: 4.92s Backward time: 3.35s Step time: 3.68s\n", - "721056 Examples seen. Accuracy:0.9286 Error: 0.20232 Loss:0.16635 Threads: 8 Forward time: 5.10s Backward time: 3.41s Step time: 3.81s\n", - "721696 Examples seen. Accuracy:0.9303 Error: 0.18548 Loss:0.18271 Threads: 8 Forward time: 5.10s Backward time: 3.36s Step time: 3.72s\n", - "722336 Examples seen. 
Accuracy:0.9307 Error: 0.18014 Loss:0.17023 Threads: 8 Forward time: 4.99s Backward time: 3.34s Step time: 3.72s\n", - "722976 Examples seen. Accuracy:0.9304 Error: 0.25271 Loss:0.26551 Threads: 8 Forward time: 4.98s Backward time: 3.35s Step time: 3.71s\n", - "723616 Examples seen. Accuracy:0.9295 Error: 0.15825 Loss:0.16284 Threads: 8 Forward time: 4.96s Backward time: 3.32s Step time: 3.70s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "724256 Examples seen. Accuracy:0.9309 Error: 0.20303 Loss:0.22516 Threads: 8 Forward time: 5.01s Backward time: 3.33s Step time: 3.71s\n", - "724896 Examples seen. Accuracy:0.9299 Error: 0.15713 Loss:0.13265 Threads: 8 Forward time: 5.06s Backward time: 3.32s Step time: 3.70s\n", - "725536 Examples seen. Accuracy:0.9316 Error: 0.11935 Loss:0.11442 Threads: 8 Forward time: 4.99s Backward time: 3.32s Step time: 4.36s\n", - "726176 Examples seen. Accuracy:0.9320 Error: 0.06989 Loss:0.04916 Threads: 8 Forward time: 4.97s Backward time: 3.34s Step time: 4.04s\n", - "726816 Examples seen. Accuracy:0.9312 Error: 0.24818 Loss:0.29031 Threads: 8 Forward time: 5.00s Backward time: 3.35s Step time: 3.78s\n", - "727456 Examples seen. Accuracy:0.9311 Error: 0.23920 Loss:0.30601 Threads: 8 Forward time: 4.95s Backward time: 3.29s Step time: 3.82s\n", - "728096 Examples seen. Accuracy:0.9302 Error: 0.23040 Loss:0.21368 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.69s\n", - "728736 Examples seen. Accuracy:0.9275 Error: 0.28287 Loss:0.27504 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.67s\n", - "729376 Examples seen. Accuracy:0.9271 Error: 0.26658 Loss:0.32178 Threads: 8 Forward time: 4.93s Backward time: 3.33s Step time: 3.67s\n", - "730016 Examples seen. Accuracy:0.9272 Error: 0.05861 Loss:0.04194 Threads: 8 Forward time: 4.91s Backward time: 3.33s Step time: 3.68s\n", - "730656 Examples seen. 
Accuracy:0.9296 Error: 0.12672 Loss:0.08934 Threads: 8 Forward time: 5.04s Backward time: 3.35s Step time: 3.68s\n", - "731296 Examples seen. Accuracy:0.9292 Error: 0.20098 Loss:0.24593 Threads: 8 Forward time: 5.00s Backward time: 3.37s Step time: 3.73s\n", - "731936 Examples seen. Accuracy:0.9301 Error: 0.18390 Loss:0.16926 Threads: 8 Forward time: 4.98s Backward time: 3.35s Step time: 3.67s\n", - "732576 Examples seen. Accuracy:0.9311 Error: 0.18309 Loss:0.17629 Threads: 8 Forward time: 5.09s Backward time: 3.45s Step time: 3.72s\n", - "733216 Examples seen. Accuracy:0.9304 Error: 0.11153 Loss:0.09986 Threads: 8 Forward time: 4.95s Backward time: 3.37s Step time: 3.71s\n", - "733856 Examples seen. Accuracy:0.9311 Error: 0.18930 Loss:0.19622 Threads: 8 Forward time: 4.99s Backward time: 3.35s Step time: 3.69s\n", - "734496 Examples seen. Accuracy:0.9290 Error: 0.15428 Loss:0.22344 Threads: 8 Forward time: 4.98s Backward time: 3.35s Step time: 3.67s\n", - "735136 Examples seen. Accuracy:0.9263 Error: 0.17778 Loss:0.12305 Threads: 8 Forward time: 4.94s Backward time: 3.35s Step time: 3.71s\n", - "735776 Examples seen. Accuracy:0.9274 Error: 0.15163 Loss:0.11392 Threads: 8 Forward time: 4.99s Backward time: 3.34s Step time: 3.69s\n", - "736416 Examples seen. Accuracy:0.9273 Error: 0.16412 Loss:0.18380 Threads: 8 Forward time: 4.93s Backward time: 3.31s Step time: 3.68s\n", - "737056 Examples seen. Accuracy:0.9268 Error: 0.30519 Loss:0.31991 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.66s\n", - "737696 Examples seen. Accuracy:0.9262 Error: 0.21756 Loss:0.31419 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.67s\n", - "738336 Examples seen. Accuracy:0.9256 Error: 0.25406 Loss:0.24444 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.66s\n", - "738976 Examples seen. Accuracy:0.9270 Error: 0.11830 Loss:0.10572 Threads: 8 Forward time: 4.89s Backward time: 3.32s Step time: 3.66s\n", - "739616 Examples seen. 
Accuracy:0.9280 Error: 0.30708 Loss:0.32502 Threads: 8 Forward time: 4.93s Backward time: 3.31s Step time: 3.66s\n", - "740256 Examples seen. Accuracy:0.9275 Error: 0.19924 Loss:0.30101 Threads: 8 Forward time: 4.91s Backward time: 3.29s Step time: 3.66s\n", - "740896 Examples seen. Accuracy:0.9268 Error: 0.17401 Loss:0.25244 Threads: 8 Forward time: 4.92s Backward time: 3.35s Step time: 3.66s\n", - "741536 Examples seen. Accuracy:0.9267 Error: 0.24023 Loss:0.23901 Threads: 8 Forward time: 4.96s Backward time: 3.42s Step time: 3.70s\n", - "742176 Examples seen. Accuracy:0.9269 Error: 0.24492 Loss:0.21680 Threads: 8 Forward time: 4.93s Backward time: 3.36s Step time: 3.69s\n", - "742816 Examples seen. Accuracy:0.9278 Error: 0.22954 Loss:0.20757 Threads: 8 Forward time: 4.94s Backward time: 3.33s Step time: 3.67s\n", - "743456 Examples seen. Accuracy:0.9256 Error: 0.34231 Loss:0.50278 Threads: 8 Forward time: 4.90s Backward time: 3.31s Step time: 3.70s\n", - "744096 Examples seen. Accuracy:0.9246 Error: 0.36100 Loss:0.41389 Threads: 8 Forward time: 4.92s Backward time: 3.34s Step time: 3.73s\n", - "744736 Examples seen. Accuracy:0.9255 Error: 0.16333 Loss:0.11708 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.65s\n", - "745376 Examples seen. Accuracy:0.9258 Error: 0.21078 Loss:0.25444 Threads: 8 Forward time: 4.92s Backward time: 3.32s Step time: 3.64s\n", - "746016 Examples seen. Accuracy:0.9247 Error: 0.19289 Loss:0.15980 Threads: 8 Forward time: 4.91s Backward time: 3.32s Step time: 3.65s\n", - "746656 Examples seen. Accuracy:0.9250 Error: 0.19259 Loss:0.19599 Threads: 8 Forward time: 4.98s Backward time: 3.34s Step time: 3.68s\n", - "747296 Examples seen. Accuracy:0.9250 Error: 0.13941 Loss:0.15193 Threads: 8 Forward time: 5.03s Backward time: 3.34s Step time: 3.71s\n", - "747936 Examples seen. 
Accuracy:0.9250 Error: 0.16013 Loss:0.18090 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.67s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 15 Examples seen:748560 Validation Accuracy: 0.9652 Validation Error: 0.0958 Validation Loss: 0.0955 Total time: 83.60min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 15. Working time: 1.39 hours.\n", - "749200 Examples seen. Accuracy:0.9228 Error: 0.15590 Loss:0.13903 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.74s\n", - "749840 Examples seen. Accuracy:0.9228 Error: 0.14768 Loss:0.13154 Threads: 8 Forward time: 5.01s Backward time: 3.33s Step time: 3.70s\n", - "750480 Examples seen. Accuracy:0.9237 Error: 0.13735 Loss:0.12060 Threads: 8 Forward time: 4.98s Backward time: 3.34s Step time: 4.28s\n", - "751120 Examples seen. Accuracy:0.9237 Error: 0.29119 Loss:0.40009 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.69s\n", - "751760 Examples seen. Accuracy:0.9236 Error: 0.13724 Loss:0.08911 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.68s\n", - "752400 Examples seen. Accuracy:0.9234 Error: 0.23360 Loss:0.19725 Threads: 8 Forward time: 4.92s Backward time: 3.31s Step time: 3.67s\n", - "753040 Examples seen. Accuracy:0.9242 Error: 0.20335 Loss:0.17017 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.68s\n", - "753680 Examples seen. Accuracy:0.9253 Error: 0.24722 Loss:0.24969 Threads: 8 Forward time: 4.96s Backward time: 3.35s Step time: 3.70s\n", - "754320 Examples seen. Accuracy:0.9272 Error: 0.19458 Loss:0.20635 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 3.70s\n", - "754960 Examples seen. Accuracy:0.9266 Error: 0.23492 Loss:0.27245 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.70s\n", - "755600 Examples seen. 
Accuracy:0.9267 Error: 0.16444 Loss:0.15543 Threads: 8 Forward time: 5.01s Backward time: 3.29s Step time: 3.66s\n", - "756240 Examples seen. Accuracy:0.9267 Error: 0.21158 Loss:0.14912 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.67s\n", - "756880 Examples seen. Accuracy:0.9262 Error: 0.18051 Loss:0.16552 Threads: 8 Forward time: 4.92s Backward time: 3.31s Step time: 3.67s\n", - "757520 Examples seen. Accuracy:0.9250 Error: 0.19389 Loss:0.17590 Threads: 8 Forward time: 4.93s Backward time: 3.31s Step time: 3.72s\n", - "758160 Examples seen. Accuracy:0.9236 Error: 0.21560 Loss:0.24763 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.65s\n", - "758800 Examples seen. Accuracy:0.9240 Error: 0.14095 Loss:0.14267 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.66s\n", - "759440 Examples seen. Accuracy:0.9258 Error: 0.17888 Loss:0.17523 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.67s\n", - "760080 Examples seen. Accuracy:0.9258 Error: 0.20145 Loss:0.17130 Threads: 8 Forward time: 4.90s Backward time: 3.33s Step time: 3.68s\n", - "760720 Examples seen. Accuracy:0.9253 Error: 0.28773 Loss:0.26773 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.66s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "761360 Examples seen. Accuracy:0.9254 Error: 0.18094 Loss:0.17660 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.66s\n", - "762000 Examples seen. Accuracy:0.9264 Error: 0.13423 Loss:0.16859 Threads: 8 Forward time: 4.90s Backward time: 3.29s Step time: 3.65s\n", - "762640 Examples seen. Accuracy:0.9269 Error: 0.24560 Loss:0.25528 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.67s\n", - "763280 Examples seen. Accuracy:0.9269 Error: 0.18335 Loss:0.16227 Threads: 8 Forward time: 4.92s Backward time: 3.30s Step time: 3.68s\n", - "763920 Examples seen. 
Accuracy:0.9265 Error: 0.24932 Loss:0.21984 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.67s\n", - "764560 Examples seen. Accuracy:0.9277 Error: 0.17412 Loss:0.19422 Threads: 8 Forward time: 4.99s Backward time: 3.33s Step time: 3.67s\n", - "765200 Examples seen. Accuracy:0.9281 Error: 0.28186 Loss:0.22473 Threads: 8 Forward time: 4.97s Backward time: 3.33s Step time: 3.70s\n", - "765840 Examples seen. Accuracy:0.9297 Error: 0.14800 Loss:0.21123 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.68s\n", - "766480 Examples seen. Accuracy:0.9285 Error: 0.21399 Loss:0.21713 Threads: 8 Forward time: 4.94s Backward time: 3.31s Step time: 3.69s\n", - "767120 Examples seen. Accuracy:0.9299 Error: 0.15176 Loss:0.12546 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.66s\n", - "767760 Examples seen. Accuracy:0.9289 Error: 0.16088 Loss:0.17257 Threads: 8 Forward time: 4.99s Backward time: 3.32s Step time: 3.66s\n", - "768400 Examples seen. Accuracy:0.9288 Error: 0.24400 Loss:0.30570 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.74s\n", - "769040 Examples seen. Accuracy:0.9278 Error: 0.23058 Loss:0.32038 Threads: 8 Forward time: 4.92s Backward time: 3.30s Step time: 3.65s\n", - "769680 Examples seen. Accuracy:0.9265 Error: 0.24857 Loss:0.20830 Threads: 8 Forward time: 4.92s Backward time: 3.30s Step time: 3.65s\n", - "770320 Examples seen. Accuracy:0.9256 Error: 0.14352 Loss:0.11134 Threads: 8 Forward time: 4.90s Backward time: 3.32s Step time: 3.66s\n", - "770960 Examples seen. Accuracy:0.9270 Error: 0.17809 Loss:0.21109 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.66s\n", - "771600 Examples seen. Accuracy:0.9254 Error: 0.24045 Loss:0.35792 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.69s\n", - "772240 Examples seen. Accuracy:0.9256 Error: 0.23819 Loss:0.31171 Threads: 8 Forward time: 5.04s Backward time: 3.28s Step time: 3.69s\n", - "772880 Examples seen. 
Accuracy:0.9265 Error: 0.22070 Loss:0.17204 Threads: 8 Forward time: 5.01s Backward time: 3.31s Step time: 3.73s\n", - "773520 Examples seen. Accuracy:0.9271 Error: 0.16275 Loss:0.13333 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.67s\n", - "774160 Examples seen. Accuracy:0.9261 Error: 0.19483 Loss:0.17396 Threads: 8 Forward time: 5.02s Backward time: 3.32s Step time: 3.69s\n", - "774800 Examples seen. Accuracy:0.9267 Error: 0.13429 Loss:0.12911 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.68s\n", - "775440 Examples seen. Accuracy:0.9267 Error: 0.21414 Loss:0.18982 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 4.31s\n", - "776080 Examples seen. Accuracy:0.9267 Error: 0.24838 Loss:0.20335 Threads: 8 Forward time: 5.19s Backward time: 3.44s Step time: 5.11s\n", - "776720 Examples seen. Accuracy:0.9280 Error: 0.17126 Loss:0.13963 Threads: 8 Forward time: 5.02s Backward time: 3.31s Step time: 4.20s\n", - "777360 Examples seen. Accuracy:0.9293 Error: 0.13795 Loss:0.13239 Threads: 8 Forward time: 5.03s Backward time: 3.30s Step time: 4.57s\n", - "778000 Examples seen. Accuracy:0.9295 Error: 0.17199 Loss:0.16685 Threads: 8 Forward time: 5.03s Backward time: 3.32s Step time: 4.66s\n", - "778640 Examples seen. Accuracy:0.9291 Error: 0.19896 Loss:0.17809 Threads: 8 Forward time: 4.99s Backward time: 3.31s Step time: 3.87s\n", - "779280 Examples seen. Accuracy:0.9286 Error: 0.17767 Loss:0.17971 Threads: 8 Forward time: 5.07s Backward time: 3.31s Step time: 3.69s\n", - "779920 Examples seen. Accuracy:0.9282 Error: 0.14948 Loss:0.12814 Threads: 8 Forward time: 4.99s Backward time: 3.30s Step time: 4.33s\n", - "780560 Examples seen. Accuracy:0.9277 Error: 0.14642 Loss:0.12847 Threads: 8 Forward time: 4.94s Backward time: 3.30s Step time: 3.71s\n", - "781200 Examples seen. Accuracy:0.9275 Error: 0.16637 Loss:0.16562 Threads: 8 Forward time: 4.99s Backward time: 3.32s Step time: 3.73s\n", - "781840 Examples seen. 
Accuracy:0.9289 Error: 0.18915 Loss:0.22348 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.71s\n", - "782480 Examples seen. Accuracy:0.9289 Error: 0.19786 Loss:0.19222 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.71s\n", - "783120 Examples seen. Accuracy:0.9288 Error: 0.23412 Loss:0.29708 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.70s\n", - "783760 Examples seen. Accuracy:0.9301 Error: 0.19054 Loss:0.18742 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.73s\n", - "784400 Examples seen. Accuracy:0.9283 Error: 0.36893 Loss:0.46569 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.75s\n", - "785040 Examples seen. Accuracy:0.9282 Error: 0.22870 Loss:0.23232 Threads: 8 Forward time: 5.03s Backward time: 3.31s Step time: 3.74s\n", - "785680 Examples seen. Accuracy:0.9287 Error: 0.15320 Loss:0.12290 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.74s\n", - "786320 Examples seen. Accuracy:0.9282 Error: 0.16937 Loss:0.20100 Threads: 8 Forward time: 4.97s Backward time: 3.32s Step time: 3.73s\n", - "786960 Examples seen. Accuracy:0.9266 Error: 0.27295 Loss:0.29034 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.74s\n", - "787600 Examples seen. Accuracy:0.9261 Error: 0.17976 Loss:0.17512 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.75s\n", - "788240 Examples seen. Accuracy:0.9260 Error: 0.07664 Loss:0.06292 Threads: 8 Forward time: 5.01s Backward time: 3.32s Step time: 3.77s\n", - "788880 Examples seen. Accuracy:0.9273 Error: 0.11200 Loss:0.08151 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.74s\n", - "789520 Examples seen. Accuracy:0.9271 Error: 0.16134 Loss:0.23983 Threads: 8 Forward time: 5.02s Backward time: 3.32s Step time: 3.73s\n", - "790160 Examples seen. Accuracy:0.9277 Error: 0.19006 Loss:0.15261 Threads: 8 Forward time: 5.01s Backward time: 3.32s Step time: 3.72s\n", - "790800 Examples seen. 
Accuracy:0.9309 Error: 0.19073 Loss:0.20922 Threads: 8 Forward time: 4.92s Backward time: 3.30s Step time: 3.71s\n", - "791440 Examples seen. Accuracy:0.9315 Error: 0.11079 Loss:0.11771 Threads: 8 Forward time: 4.95s Backward time: 3.31s Step time: 3.72s\n", - "792080 Examples seen. Accuracy:0.9315 Error: 0.19563 Loss:0.18784 Threads: 8 Forward time: 4.94s Backward time: 3.32s Step time: 3.70s\n", - "792720 Examples seen. Accuracy:0.9309 Error: 0.12118 Loss:0.07905 Threads: 8 Forward time: 4.93s Backward time: 3.33s Step time: 3.71s\n", - "793360 Examples seen. Accuracy:0.9299 Error: 0.26418 Loss:0.39082 Threads: 8 Forward time: 4.99s Backward time: 3.34s Step time: 3.72s\n", - "794000 Examples seen. Accuracy:0.9308 Error: 0.16303 Loss:0.13526 Threads: 8 Forward time: 4.95s Backward time: 3.33s Step time: 3.71s\n", - "794640 Examples seen. Accuracy:0.9294 Error: 0.19839 Loss:0.17040 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.71s\n", - "795280 Examples seen. Accuracy:0.9312 Error: 0.17635 Loss:0.20949 Threads: 8 Forward time: 5.01s Backward time: 3.37s Step time: 3.75s\n", - "795920 Examples seen. Accuracy:0.9312 Error: 0.17916 Loss:0.14813 Threads: 8 Forward time: 4.92s Backward time: 3.32s Step time: 3.72s\n", - "796560 Examples seen. Accuracy:0.9291 Error: 0.25460 Loss:0.22747 Threads: 8 Forward time: 4.95s Backward time: 3.28s Step time: 3.70s\n", - "797200 Examples seen. Accuracy:0.9289 Error: 0.22125 Loss:0.23003 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 3.71s\n", - "797840 Examples seen. Accuracy:0.9296 Error: 0.21544 Loss:0.20087 Threads: 8 Forward time: 5.04s Backward time: 3.36s Step time: 3.72s\n", - "Starting Validation.\n", - "VALIDATION RECORD! 
Saving NN at SimplePlantLeafDisease.nn\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epochs: 16 Examples seen:798464 Validation Accuracy: 0.9674 Validation Error: 0.0888 Validation Loss: 0.0881 Total time: 88.98min\n", - "Layer 0 Max Output: 1.266 Min Output: -2.000 TNNetInput 128,128,3 Times: 0.00s 0.00s\n", - "Layer 1 Neurons: 64 Max Weight: 0.359 Min Weight: -0.303 Max Output: 5.221 Min Output: -5.160 TNNetConvolutionLinear 66,66,64 Times: 8.52s 0.40s Parent:0\n", - "Layer 2 Max Output: 5.221 Min Output: -2.959 TNNetMaxPool 33,33,64 Times: 3.61s 0.06s Parent:1\n", - "Layer 3 Neurons: 1 Max Weight: 0.569 Min Weight: 0.270 Max Output: 8.702 Min Output: -5.675 TNNetMovingStdNormalization 33,33,64 Times: 0.27s 0.00s Parent:2\n", - "Layer 4 Neurons: 64 Max Weight: 0.265 Min Weight: -0.199 Max Output: 9.768 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.83s 0.51s Parent:3\n", - "Layer 5 Neurons: 64 Max Weight: 0.330 Min Weight: -0.325 Max Output: 11.329 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.82s 0.18s Parent:4\n", - "Layer 6 Max Output: 11.329 Min Output: 0.000 TNNetMaxPool 17,17,64 Times: 0.49s 0.02s Parent:5\n", - "Layer 7 Neurons: 64 Max Weight: 0.302 Min Weight: -0.237 Max Output: 6.427 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.44s 0.09s Parent:6\n", - "Layer 8 Neurons: 64 Max Weight: 0.240 Min Weight: -0.225 Max Output: 6.061 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.47s 0.09s Parent:7\n", - "Layer 9 Neurons: 64 Max Weight: 0.211 Min Weight: -0.201 Max Output: 7.872 Min Output: 0.000 TNNetConvolutionReLU 9,9,64 Times: 0.45s 0.02s Parent:8\n", - "Layer 10 Max Output: 7.872 Min Output: 0.000 TNNetDropout 9,9,64 Times: 0.00s 0.00s Parent:9\n", - "Layer 11 Max Output: 7.872 Min Output: 0.000 TNNetMaxPool 5,5,64 Times: 0.05s 0.00s Parent:10\n", - "Layer 12 Neurons: 39 Max Weight: 0.366 Min Weight: -0.331 Max Output: 22.108 Min Output: -13.015 TNNetFullConnectLinear 39,1,1 Times: 
0.04s 0.00s Parent:11\n", - "Layer 13 Max Output: 1.000 Min Output: 0.000 TNNetSoftMax 39,1,1 Times: 0.00s 0.00s Parent:12\n", - "Epoch time: 4.8 minutes. 100 epochs: 8.1 hours.\n", - "Epochs: 16. Working time: 1.48 hours.\n", - "799104 Examples seen. Accuracy:0.9279 Error: 0.28146 Loss:0.52051 Threads: 8 Forward time: 4.93s Backward time: 3.33s Step time: 3.69s\n", - "799744 Examples seen. Accuracy:0.9269 Error: 0.24391 Loss:0.27840 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.67s\n", - "800384 Examples seen. Accuracy:0.9273 Error: 0.25354 Loss:0.31023 Threads: 8 Forward time: 5.05s Backward time: 3.31s Step time: 3.70s\n", - "801024 Examples seen. Accuracy:0.9255 Error: 0.22366 Loss:0.31534 Threads: 8 Forward time: 4.97s Backward time: 3.32s Step time: 3.73s\n", - "801664 Examples seen. Accuracy:0.9235 Error: 0.24743 Loss:0.43041 Threads: 8 Forward time: 5.00s Backward time: 3.33s Step time: 3.69s\n", - "802304 Examples seen. Accuracy:0.9227 Error: 0.24070 Loss:0.21201 Threads: 8 Forward time: 4.96s Backward time: 3.34s Step time: 3.70s\n", - "802944 Examples seen. Accuracy:0.9229 Error: 0.31073 Loss:0.38355 Threads: 8 Forward time: 4.99s Backward time: 3.31s Step time: 3.70s\n", - "803584 Examples seen. Accuracy:0.9206 Error: 0.34427 Loss:0.45807 Threads: 8 Forward time: 5.03s Backward time: 3.33s Step time: 3.70s\n", - "804224 Examples seen. Accuracy:0.9221 Error: 0.14951 Loss:0.13564 Threads: 8 Forward time: 5.13s Backward time: 3.34s Step time: 4.28s\n", - "804864 Examples seen. Accuracy:0.9220 Error: 0.27017 Loss:0.26060 Threads: 8 Forward time: 4.95s Backward time: 3.34s Step time: 3.71s\n", - "805504 Examples seen. Accuracy:0.9228 Error: 0.14546 Loss:0.12689 Threads: 8 Forward time: 4.99s Backward time: 3.31s Step time: 3.68s\n", - "806144 Examples seen. Accuracy:0.9221 Error: 0.24039 Loss:0.26874 Threads: 8 Forward time: 4.93s Backward time: 3.31s Step time: 3.67s\n", - "806784 Examples seen. 
Accuracy:0.9251 Error: 0.11481 Loss:0.08027 Threads: 8 Forward time: 4.97s Backward time: 3.34s Step time: 3.67s\n", - "807424 Examples seen. Accuracy:0.9232 Error: 0.28140 Loss:0.32225 Threads: 8 Forward time: 4.93s Backward time: 3.34s Step time: 3.66s\n", - "808064 Examples seen. Accuracy:0.9228 Error: 0.21544 Loss:0.24749 Threads: 8 Forward time: 4.96s Backward time: 3.35s Step time: 3.67s\n", - "808704 Examples seen. Accuracy:0.9233 Error: 0.24415 Loss:0.22534 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.68s\n", - "809344 Examples seen. Accuracy:0.9236 Error: 0.18281 Loss:0.19059 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 3.67s\n", - "809984 Examples seen. Accuracy:0.9237 Error: 0.29003 Loss:0.30573 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.67s\n", - "810624 Examples seen. Accuracy:0.9254 Error: 0.16253 Loss:0.13091 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.69s\n", - "811264 Examples seen. Accuracy:0.9270 Error: 0.24708 Loss:0.22703 Threads: 8 Forward time: 4.96s Backward time: 3.36s Step time: 3.69s\n", - "811904 Examples seen. Accuracy:0.9233 Error: 0.18024 Loss:0.27344 Threads: 8 Forward time: 4.99s Backward time: 3.36s Step time: 3.69s\n", - "812544 Examples seen. Accuracy:0.9192 Error: 0.35274 Loss:0.37108 Threads: 8 Forward time: 5.02s Backward time: 3.31s Step time: 3.68s\n", - "813184 Examples seen. Accuracy:0.9198 Error: 0.16868 Loss:0.14000 Threads: 8 Forward time: 5.00s Backward time: 3.34s Step time: 3.69s\n", - "813824 Examples seen. Accuracy:0.9207 Error: 0.21982 Loss:0.17579 Threads: 8 Forward time: 5.01s Backward time: 3.29s Step time: 3.67s\n", - "814464 Examples seen. Accuracy:0.9218 Error: 0.09917 Loss:0.08106 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.71s\n", - "815104 Examples seen. Accuracy:0.9210 Error: 0.31732 Loss:0.45190 Threads: 8 Forward time: 4.95s Backward time: 3.30s Step time: 3.70s\n", - "815744 Examples seen. 
Accuracy:0.9233 Error: 0.15465 Loss:0.12987 Threads: 8 Forward time: 4.92s Backward time: 3.27s Step time: 3.66s\n", - "816384 Examples seen. Accuracy:0.9228 Error: 0.11111 Loss:0.08842 Threads: 8 Forward time: 4.94s Backward time: 3.30s Step time: 3.67s\n", - "817024 Examples seen. Accuracy:0.9233 Error: 0.33055 Loss:0.41229 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.66s\n", - "817664 Examples seen. Accuracy:0.9247 Error: 0.20235 Loss:0.14343 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.64s\n", - "818304 Examples seen. Accuracy:0.9260 Error: 0.10952 Loss:0.07429 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.65s\n", - "818944 Examples seen. Accuracy:0.9269 Error: 0.15545 Loss:0.11444 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.65s\n", - "819584 Examples seen. Accuracy:0.9286 Error: 0.18680 Loss:0.18544 Threads: 8 Forward time: 4.94s Backward time: 3.31s Step time: 3.65s\n", - "820224 Examples seen. Accuracy:0.9287 Error: 0.18136 Loss:0.21638 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.68s\n", - "820864 Examples seen. Accuracy:0.9300 Error: 0.16558 Loss:0.11525 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.67s\n", - "821504 Examples seen. Accuracy:0.9314 Error: 0.13345 Loss:0.16936 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.65s\n", - "822144 Examples seen. Accuracy:0.9321 Error: 0.25381 Loss:0.28719 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.66s\n", - "822784 Examples seen. Accuracy:0.9319 Error: 0.24331 Loss:0.35148 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.65s\n", - "823424 Examples seen. Accuracy:0.9299 Error: 0.40296 Loss:0.49650 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.66s\n", - "824064 Examples seen. Accuracy:0.9288 Error: 0.16747 Loss:0.11808 Threads: 8 Forward time: 4.91s Backward time: 3.31s Step time: 3.65s\n", - "824704 Examples seen. 
Accuracy:0.9283 Error: 0.18405 Loss:0.17831 Threads: 8 Forward time: 4.91s Backward time: 3.32s Step time: 3.65s\n", - "825344 Examples seen. Accuracy:0.9276 Error: 0.20185 Loss:0.21823 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.65s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "825984 Examples seen. Accuracy:0.9280 Error: 0.18283 Loss:0.18587 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.69s\n", - "826624 Examples seen. Accuracy:0.9286 Error: 0.19055 Loss:0.18149 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.67s\n", - "827264 Examples seen. Accuracy:0.9293 Error: 0.12547 Loss:0.08409 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 3.69s\n", - "827904 Examples seen. Accuracy:0.9289 Error: 0.17753 Loss:0.20191 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.66s\n", - "828544 Examples seen. Accuracy:0.9292 Error: 0.13984 Loss:0.10963 Threads: 8 Forward time: 5.04s Backward time: 3.30s Step time: 3.68s\n", - "829184 Examples seen. Accuracy:0.9292 Error: 0.18744 Loss:0.18920 Threads: 8 Forward time: 4.95s Backward time: 3.28s Step time: 3.71s\n", - "829824 Examples seen. Accuracy:0.9282 Error: 0.19414 Loss:0.16085 Threads: 8 Forward time: 4.92s Backward time: 3.27s Step time: 3.66s\n", - "830464 Examples seen. Accuracy:0.9283 Error: 0.05182 Loss:0.02891 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.66s\n", - "831104 Examples seen. Accuracy:0.9301 Error: 0.10136 Loss:0.06753 Threads: 8 Forward time: 4.98s Backward time: 3.34s Step time: 3.67s\n", - "831744 Examples seen. Accuracy:0.9312 Error: 0.15923 Loss:0.16901 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.68s\n", - "832384 Examples seen. Accuracy:0.9294 Error: 0.33872 Loss:0.42488 Threads: 8 Forward time: 4.96s Backward time: 3.33s Step time: 3.68s\n", - "833024 Examples seen. 
Accuracy:0.9276 Error: 0.26633 Loss:0.33095 Threads: 8 Forward time: 5.01s Backward time: 3.29s Step time: 3.68s\n", - "833664 Examples seen. Accuracy:0.9280 Error: 0.17941 Loss:0.13564 Threads: 8 Forward time: 5.06s Backward time: 3.26s Step time: 3.68s\n", - "834304 Examples seen. Accuracy:0.9288 Error: 0.20437 Loss:0.26723 Threads: 8 Forward time: 5.15s Backward time: 3.37s Step time: 4.28s\n", - "834944 Examples seen. Accuracy:0.9290 Error: 0.21144 Loss:0.22799 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.79s\n", - "835584 Examples seen. Accuracy:0.9275 Error: 0.15827 Loss:0.10988 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.72s\n", - "836224 Examples seen. Accuracy:0.9269 Error: 0.23111 Loss:0.27598 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.84s\n", - "836864 Examples seen. Accuracy:0.9282 Error: 0.16234 Loss:0.11793 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.74s\n", - "837504 Examples seen. Accuracy:0.9290 Error: 0.20848 Loss:0.19009 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.70s\n", - "838144 Examples seen. Accuracy:0.9293 Error: 0.11908 Loss:0.08998 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.70s\n", - "838784 Examples seen. Accuracy:0.9307 Error: 0.10253 Loss:0.06417 Threads: 8 Forward time: 5.01s Backward time: 3.31s Step time: 3.68s\n", - "839424 Examples seen. Accuracy:0.9309 Error: 0.08945 Loss:0.07415 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.76s\n", - "840064 Examples seen. Accuracy:0.9303 Error: 0.29540 Loss:0.26998 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.66s\n", - "840704 Examples seen. Accuracy:0.9303 Error: 0.16691 Loss:0.18135 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.65s\n", - "841344 Examples seen. Accuracy:0.9305 Error: 0.09373 Loss:0.07867 Threads: 8 Forward time: 4.93s Backward time: 3.27s Step time: 3.69s\n", - "841984 Examples seen. 
Accuracy:0.9319 Error: 0.16894 Loss:0.21846 Threads: 8 Forward time: 4.98s Backward time: 3.34s Step time: 3.70s\n", - "842624 Examples seen. Accuracy:0.9334 Error: 0.12858 Loss:0.14987 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.69s\n", - "843264 Examples seen. Accuracy:0.9344 Error: 0.15648 Loss:0.13730 Threads: 8 Forward time: 5.02s Backward time: 3.32s Step time: 3.69s\n", - "843904 Examples seen. Accuracy:0.9337 Error: 0.25002 Loss:0.25660 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.70s\n", - "844544 Examples seen. Accuracy:0.9346 Error: 0.08790 Loss:0.12666 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.67s\n", - "845184 Examples seen. Accuracy:0.9340 Error: 0.23597 Loss:0.31651 Threads: 8 Forward time: 4.90s Backward time: 3.30s Step time: 3.68s\n", - "845824 Examples seen. Accuracy:0.9342 Error: 0.13486 Loss:0.09496 Threads: 8 Forward time: 4.90s Backward time: 3.27s Step time: 3.61s\n", - "846464 Examples seen. Accuracy:0.9329 Error: 0.08714 Loss:0.14212 Threads: 8 Forward time: 4.91s Backward time: 3.26s Step time: 3.60s\n", - "847104 Examples seen. Accuracy:0.9330 Error: 0.23075 Loss:0.31477 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.62s\n", - "847744 Examples seen. Accuracy:0.9337 Error: 0.13689 Loss:0.18215 Threads: 8 Forward time: 4.99s Backward time: 3.33s Step time: 3.68s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 17 Examples seen:848368 Validation Accuracy: 0.9692 Validation Error: 0.0837 Validation Loss: 0.0860 Total time: 94.26min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 17. Working time: 1.57 hours.\n", - "849008 Examples seen. Accuracy:0.9315 Error: 0.20078 Loss:0.24331 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.69s\n", - "849648 Examples seen. 
Accuracy:0.9311 Error: 0.15992 Loss:0.14799 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.65s\n", - "850288 Examples seen. Accuracy:0.9318 Error: 0.22830 Loss:0.19558 Threads: 8 Forward time: 5.03s Backward time: 3.32s Step time: 3.69s\n", - "850928 Examples seen. Accuracy:0.9310 Error: 0.29294 Loss:0.32634 Threads: 8 Forward time: 5.02s Backward time: 3.29s Step time: 3.80s\n", - "851568 Examples seen. Accuracy:0.9301 Error: 0.23995 Loss:0.18865 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.66s\n", - "852208 Examples seen. Accuracy:0.9303 Error: 0.14758 Loss:0.12296 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.66s\n", - "852848 Examples seen. Accuracy:0.9311 Error: 0.14358 Loss:0.10642 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.65s\n", - "853488 Examples seen. Accuracy:0.9312 Error: 0.19626 Loss:0.19077 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.66s\n", - "854128 Examples seen. Accuracy:0.9325 Error: 0.11741 Loss:0.08145 Threads: 8 Forward time: 4.91s Backward time: 3.29s Step time: 3.66s\n", - "854768 Examples seen. Accuracy:0.9322 Error: 0.19403 Loss:0.16102 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.64s\n", - "855408 Examples seen. Accuracy:0.9318 Error: 0.17729 Loss:0.15791 Threads: 8 Forward time: 5.06s Backward time: 3.31s Step time: 3.67s\n", - "856048 Examples seen. Accuracy:0.9315 Error: 0.15296 Loss:0.15293 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.64s\n", - "856688 Examples seen. Accuracy:0.9334 Error: 0.10373 Loss:0.10627 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.65s\n", - "857328 Examples seen. Accuracy:0.9330 Error: 0.22403 Loss:0.28188 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.67s\n", - "857968 Examples seen. Accuracy:0.9328 Error: 0.33263 Loss:0.35806 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.65s\n", - "858608 Examples seen. 
Accuracy:0.9338 Error: 0.18441 Loss:0.17305 Threads: 8 Forward time: 4.99s Backward time: 3.31s Step time: 3.65s\n", - "859248 Examples seen. Accuracy:0.9343 Error: 0.13627 Loss:0.12101 Threads: 8 Forward time: 5.02s Backward time: 3.33s Step time: 4.29s\n", - "859888 Examples seen. Accuracy:0.9335 Error: 0.19746 Loss:0.21544 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.76s\n", - "860528 Examples seen. Accuracy:0.9315 Error: 0.14519 Loss:0.19130 Threads: 8 Forward time: 4.99s Backward time: 3.37s Step time: 3.78s\n", - "861168 Examples seen. Accuracy:0.9310 Error: 0.23647 Loss:0.23213 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.78s\n", - "861808 Examples seen. Accuracy:0.9338 Error: 0.12715 Loss:0.10385 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.70s\n", - "862448 Examples seen. Accuracy:0.9336 Error: 0.15821 Loss:0.13975 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.68s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "863088 Examples seen. Accuracy:0.9347 Error: 0.17543 Loss:0.12809 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.70s\n", - "863728 Examples seen. Accuracy:0.9347 Error: 0.21422 Loss:0.22431 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.71s\n", - "864368 Examples seen. Accuracy:0.9336 Error: 0.30127 Loss:0.35847 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.73s\n", - "865008 Examples seen. Accuracy:0.9342 Error: 0.12342 Loss:0.12423 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.73s\n", - "865648 Examples seen. Accuracy:0.9339 Error: 0.23570 Loss:0.18630 Threads: 8 Forward time: 4.95s Backward time: 3.28s Step time: 3.69s\n", - "866288 Examples seen. Accuracy:0.9329 Error: 0.16039 Loss:0.12395 Threads: 8 Forward time: 4.94s Backward time: 3.31s Step time: 3.70s\n", - "866928 Examples seen. 
Accuracy:0.9327 Error: 0.11549 Loss:0.15805 Threads: 8 Forward time: 4.94s Backward time: 3.30s Step time: 3.70s\n", - "867568 Examples seen. Accuracy:0.9314 Error: 0.31881 Loss:0.37359 Threads: 8 Forward time: 4.95s Backward time: 3.33s Step time: 3.71s\n", - "868208 Examples seen. Accuracy:0.9308 Error: 0.20159 Loss:0.15719 Threads: 8 Forward time: 4.95s Backward time: 3.31s Step time: 3.71s\n", - "868848 Examples seen. Accuracy:0.9322 Error: 0.17554 Loss:0.13623 Threads: 8 Forward time: 4.94s Backward time: 3.32s Step time: 3.70s\n", - "869488 Examples seen. Accuracy:0.9339 Error: 0.19699 Loss:0.16187 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.71s\n", - "870128 Examples seen. Accuracy:0.9344 Error: 0.20704 Loss:0.26117 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.78s\n", - "870768 Examples seen. Accuracy:0.9336 Error: 0.11151 Loss:0.09849 Threads: 8 Forward time: 4.95s Backward time: 3.33s Step time: 3.72s\n", - "871408 Examples seen. Accuracy:0.9324 Error: 0.15624 Loss:0.15004 Threads: 8 Forward time: 4.97s Backward time: 3.32s Step time: 3.78s\n", - "872048 Examples seen. Accuracy:0.9330 Error: 0.12650 Loss:0.16674 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.76s\n", - "872688 Examples seen. Accuracy:0.9340 Error: 0.20329 Loss:0.18442 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.73s\n", - "873328 Examples seen. Accuracy:0.9321 Error: 0.15509 Loss:0.10589 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.69s\n", - "873968 Examples seen. Accuracy:0.9326 Error: 0.17602 Loss:0.17291 Threads: 8 Forward time: 4.93s Backward time: 3.30s Step time: 3.70s\n", - "874608 Examples seen. Accuracy:0.9337 Error: 0.10286 Loss:0.07093 Threads: 8 Forward time: 4.95s Backward time: 3.29s Step time: 3.69s\n", - "875248 Examples seen. Accuracy:0.9341 Error: 0.12489 Loss:0.08589 Threads: 8 Forward time: 4.95s Backward time: 3.29s Step time: 3.70s\n", - "875888 Examples seen. 
Accuracy:0.9335 Error: 0.24831 Loss:0.27215 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.71s\n", - "876528 Examples seen. Accuracy:0.9344 Error: 0.14031 Loss:0.10177 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.67s\n", - "877168 Examples seen. Accuracy:0.9356 Error: 0.21846 Loss:0.29976 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.66s\n", - "877808 Examples seen. Accuracy:0.9346 Error: 0.17239 Loss:0.16957 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.64s\n", - "878448 Examples seen. Accuracy:0.9354 Error: 0.18697 Loss:0.12211 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.65s\n", - "879088 Examples seen. Accuracy:0.9355 Error: 0.05862 Loss:0.03569 Threads: 8 Forward time: 4.91s Backward time: 3.25s Step time: 3.64s\n", - "879728 Examples seen. Accuracy:0.9363 Error: 0.15455 Loss:0.14466 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.63s\n", - "880368 Examples seen. Accuracy:0.9351 Error: 0.18215 Loss:0.25896 Threads: 8 Forward time: 5.03s Backward time: 3.31s Step time: 3.67s\n", - "881008 Examples seen. Accuracy:0.9353 Error: 0.21088 Loss:0.18012 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.69s\n", - "881648 Examples seen. Accuracy:0.9351 Error: 0.23289 Loss:0.35030 Threads: 8 Forward time: 4.97s Backward time: 3.27s Step time: 3.67s\n", - "882288 Examples seen. Accuracy:0.9345 Error: 0.18064 Loss:0.13170 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.79s\n", - "882928 Examples seen. Accuracy:0.9328 Error: 0.18830 Loss:0.19651 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 3.68s\n", - "883568 Examples seen. Accuracy:0.9319 Error: 0.19475 Loss:0.17783 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.67s\n", - "884208 Examples seen. Accuracy:0.9315 Error: 0.22865 Loss:0.25211 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.71s\n", - "884848 Examples seen. 
Accuracy:0.9320 Error: 0.22454 Loss:0.34938 Threads: 8 Forward time: 5.02s Backward time: 3.29s Step time: 3.67s\n", - "885488 Examples seen. Accuracy:0.9325 Error: 0.13374 Loss:0.09170 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.69s\n", - "886128 Examples seen. Accuracy:0.9336 Error: 0.14062 Loss:0.09998 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.65s\n", - "886768 Examples seen. Accuracy:0.9343 Error: 0.12686 Loss:0.09611 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.67s\n", - "887408 Examples seen. Accuracy:0.9354 Error: 0.26648 Loss:0.27128 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.67s\n", - "888048 Examples seen. Accuracy:0.9345 Error: 0.15077 Loss:0.17112 Threads: 8 Forward time: 4.99s Backward time: 3.30s Step time: 3.69s\n", - "888688 Examples seen. Accuracy:0.9326 Error: 0.17508 Loss:0.24567 Threads: 8 Forward time: 5.20s Backward time: 3.41s Step time: 3.71s\n", - "889328 Examples seen. Accuracy:0.9334 Error: 0.15998 Loss:0.14963 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 4.31s\n", - "889968 Examples seen. Accuracy:0.9327 Error: 0.25797 Loss:0.22629 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.69s\n", - "890608 Examples seen. Accuracy:0.9350 Error: 0.16556 Loss:0.22346 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.68s\n", - "891248 Examples seen. Accuracy:0.9347 Error: 0.19274 Loss:0.17709 Threads: 8 Forward time: 4.95s Backward time: 3.28s Step time: 3.70s\n", - "891888 Examples seen. Accuracy:0.9336 Error: 0.18390 Loss:0.14382 Threads: 8 Forward time: 4.91s Backward time: 3.28s Step time: 3.74s\n", - "892528 Examples seen. Accuracy:0.9333 Error: 0.21437 Loss:0.23370 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.75s\n", - "893168 Examples seen. Accuracy:0.9339 Error: 0.15086 Loss:0.11176 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 3.71s\n", - "893808 Examples seen. 
Accuracy:0.9340 Error: 0.14431 Loss:0.11742 Threads: 8 Forward time: 4.95s Backward time: 3.35s Step time: 3.67s\n", - "894448 Examples seen. Accuracy:0.9342 Error: 0.15652 Loss:0.17883 Threads: 8 Forward time: 5.03s Backward time: 3.31s Step time: 3.75s\n", - "895088 Examples seen. Accuracy:0.9341 Error: 0.08559 Loss:0.08442 Threads: 8 Forward time: 4.93s Backward time: 3.30s Step time: 3.65s\n", - "895728 Examples seen. Accuracy:0.9348 Error: 0.15858 Loss:0.13670 Threads: 8 Forward time: 5.09s Backward time: 3.39s Step time: 3.79s\n", - "896368 Examples seen. Accuracy:0.9354 Error: 0.13787 Loss:0.16702 Threads: 8 Forward time: 4.94s Backward time: 3.32s Step time: 3.66s\n", - "897008 Examples seen. Accuracy:0.9351 Error: 0.19693 Loss:0.16949 Threads: 8 Forward time: 4.98s Backward time: 3.32s Step time: 3.66s\n", - "897648 Examples seen. Accuracy:0.9340 Error: 0.14535 Loss:0.10326 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.81s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 18 Examples seen:898272 Validation Accuracy: 0.9721 Validation Error: 0.0786 Validation Loss: 0.0815 Total time: 99.55min\n", - "Epoch time: 5 minutes. 100 epochs: 8.3 hours.\n", - "Epochs: 18. Working time: 1.66 hours.\n", - "898912 Examples seen. Accuracy:0.9332 Error: 0.28565 Loss:0.36515 Threads: 8 Forward time: 4.89s Backward time: 3.31s Step time: 3.66s\n", - "899552 Examples seen. Accuracy:0.9332 Error: 0.19622 Loss:0.23889 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 3.64s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "900192 Examples seen. Accuracy:0.9327 Error: 0.22202 Loss:0.22946 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.63s\n", - "900832 Examples seen. Accuracy:0.9333 Error: 0.25322 Loss:0.27746 Threads: 8 Forward time: 4.98s Backward time: 3.32s Step time: 3.63s\n", - "901472 Examples seen. 
Accuracy:0.9333 Error: 0.16254 Loss:0.13168 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.63s\n", - "902112 Examples seen. Accuracy:0.9347 Error: 0.08337 Loss:0.05627 Threads: 8 Forward time: 4.91s Backward time: 3.30s Step time: 3.62s\n", - "902752 Examples seen. Accuracy:0.9340 Error: 0.16384 Loss:0.24361 Threads: 8 Forward time: 4.93s Backward time: 3.34s Step time: 3.67s\n", - "903392 Examples seen. Accuracy:0.9348 Error: 0.15722 Loss:0.13759 Threads: 8 Forward time: 4.93s Backward time: 3.31s Step time: 3.63s\n", - "904032 Examples seen. Accuracy:0.9351 Error: 0.14440 Loss:0.22122 Threads: 8 Forward time: 4.91s Backward time: 3.34s Step time: 3.66s\n", - "904672 Examples seen. Accuracy:0.9353 Error: 0.18423 Loss:0.16436 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.65s\n", - "905312 Examples seen. Accuracy:0.9346 Error: 0.19697 Loss:0.14631 Threads: 8 Forward time: 5.02s Backward time: 3.33s Step time: 3.66s\n", - "905952 Examples seen. Accuracy:0.9350 Error: 0.20111 Loss:0.18979 Threads: 8 Forward time: 5.04s Backward time: 3.33s Step time: 3.70s\n", - "906592 Examples seen. Accuracy:0.9346 Error: 0.15856 Loss:0.11087 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.66s\n", - "907232 Examples seen. Accuracy:0.9358 Error: 0.18154 Loss:0.15843 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.65s\n", - "907872 Examples seen. Accuracy:0.9370 Error: 0.20007 Loss:0.20796 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.91s\n", - "908512 Examples seen. Accuracy:0.9371 Error: 0.16443 Loss:0.15782 Threads: 8 Forward time: 5.01s Backward time: 3.30s Step time: 3.93s\n", - "909152 Examples seen. Accuracy:0.9358 Error: 0.16584 Loss:0.20556 Threads: 8 Forward time: 5.01s Backward time: 3.32s Step time: 4.22s\n", - "909792 Examples seen. Accuracy:0.9358 Error: 0.12115 Loss:0.08364 Threads: 8 Forward time: 5.00s Backward time: 3.33s Step time: 3.71s\n", - "910432 Examples seen. 
Accuracy:0.9355 Error: 0.21529 Loss:0.21447 Threads: 8 Forward time: 5.03s Backward time: 3.31s Step time: 3.71s\n", - "911072 Examples seen. Accuracy:0.9365 Error: 0.08919 Loss:0.05372 Threads: 8 Forward time: 4.97s Backward time: 3.32s Step time: 3.69s\n", - "911712 Examples seen. Accuracy:0.9371 Error: 0.13669 Loss:0.09794 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.69s\n", - "912352 Examples seen. Accuracy:0.9374 Error: 0.14753 Loss:0.13567 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.69s\n", - "912992 Examples seen. Accuracy:0.9376 Error: 0.13236 Loss:0.13950 Threads: 8 Forward time: 5.02s Backward time: 3.37s Step time: 3.72s\n", - "913632 Examples seen. Accuracy:0.9379 Error: 0.14187 Loss:0.11796 Threads: 8 Forward time: 5.81s Backward time: 3.78s Step time: 4.31s\n", - "914272 Examples seen. Accuracy:0.9366 Error: 0.18883 Loss:0.14902 Threads: 8 Forward time: 5.03s Backward time: 3.29s Step time: 3.73s\n", - "914912 Examples seen. Accuracy:0.9356 Error: 0.13842 Loss:0.16856 Threads: 8 Forward time: 4.95s Backward time: 3.30s Step time: 3.72s\n", - "915552 Examples seen. Accuracy:0.9335 Error: 0.31244 Loss:0.41594 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.71s\n", - "916192 Examples seen. Accuracy:0.9336 Error: 0.21795 Loss:0.19179 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 3.69s\n", - "916832 Examples seen. Accuracy:0.9319 Error: 0.15477 Loss:0.19886 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.70s\n", - "917472 Examples seen. Accuracy:0.9304 Error: 0.11203 Loss:0.11011 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.70s\n", - "918112 Examples seen. Accuracy:0.9288 Error: 0.19572 Loss:0.26014 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.69s\n", - "918752 Examples seen. Accuracy:0.9291 Error: 0.25683 Loss:0.24395 Threads: 8 Forward time: 4.91s Backward time: 3.30s Step time: 3.76s\n", - "919392 Examples seen. 
Accuracy:0.9303 Error: 0.14958 Loss:0.15238 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.70s\n", - "920032 Examples seen. Accuracy:0.9318 Error: 0.17831 Loss:0.20759 Threads: 8 Forward time: 4.97s Backward time: 3.27s Step time: 3.68s\n", - "920672 Examples seen. Accuracy:0.9315 Error: 0.09786 Loss:0.06715 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.70s\n", - "921312 Examples seen. Accuracy:0.9327 Error: 0.16318 Loss:0.14284 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.69s\n", - "921952 Examples seen. Accuracy:0.9337 Error: 0.12429 Loss:0.13042 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.69s\n", - "922592 Examples seen. Accuracy:0.9347 Error: 0.24117 Loss:0.26262 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.71s\n", - "923232 Examples seen. Accuracy:0.9341 Error: 0.30670 Loss:0.37487 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.70s\n", - "923872 Examples seen. Accuracy:0.9346 Error: 0.12323 Loss:0.19294 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.70s\n", - "924512 Examples seen. Accuracy:0.9347 Error: 0.10201 Loss:0.13114 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.71s\n", - "925152 Examples seen. Accuracy:0.9332 Error: 0.14510 Loss:0.14428 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.69s\n", - "925792 Examples seen. Accuracy:0.9335 Error: 0.15169 Loss:0.21652 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.69s\n", - "926432 Examples seen. Accuracy:0.9325 Error: 0.08620 Loss:0.06931 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.67s\n", - "927072 Examples seen. Accuracy:0.9321 Error: 0.29239 Loss:0.31947 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.68s\n", - "927712 Examples seen. Accuracy:0.9321 Error: 0.09640 Loss:0.08032 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.75s\n", - "928352 Examples seen. 
Accuracy:0.9333 Error: 0.11946 Loss:0.09632 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.69s\n", - "928992 Examples seen. Accuracy:0.9339 Error: 0.24253 Loss:0.25251 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.73s\n", - "929632 Examples seen. Accuracy:0.9333 Error: 0.23032 Loss:0.25145 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.66s\n", - "930272 Examples seen. Accuracy:0.9334 Error: 0.13630 Loss:0.14162 Threads: 8 Forward time: 5.00s Backward time: 3.32s Step time: 3.70s\n", - "930912 Examples seen. Accuracy:0.9325 Error: 0.23179 Loss:0.20368 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.74s\n", - "931552 Examples seen. Accuracy:0.9330 Error: 0.16954 Loss:0.15874 Threads: 8 Forward time: 4.96s Backward time: 3.32s Step time: 3.72s\n", - "932192 Examples seen. Accuracy:0.9327 Error: 0.11960 Loss:0.09325 Threads: 8 Forward time: 4.95s Backward time: 3.30s Step time: 3.72s\n", - "932832 Examples seen. Accuracy:0.9330 Error: 0.25955 Loss:0.29273 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.70s\n", - "933472 Examples seen. Accuracy:0.9331 Error: 0.13731 Loss:0.08973 Threads: 8 Forward time: 5.04s Backward time: 3.29s Step time: 3.78s\n", - "934112 Examples seen. Accuracy:0.9333 Error: 0.16796 Loss:0.15477 Threads: 8 Forward time: 4.95s Backward time: 3.30s Step time: 3.75s\n", - "934752 Examples seen. Accuracy:0.9327 Error: 0.10998 Loss:0.15942 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.73s\n", - "935392 Examples seen. Accuracy:0.9322 Error: 0.16077 Loss:0.13651 Threads: 8 Forward time: 5.07s Backward time: 3.29s Step time: 3.75s\n", - "936032 Examples seen. Accuracy:0.9339 Error: 0.10061 Loss:0.07049 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.77s\n", - "936672 Examples seen. Accuracy:0.9343 Error: 0.28101 Loss:0.31674 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.73s\n", - "937312 Examples seen. 
Accuracy:0.9341 Error: 0.07737 Loss:0.06687 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.72s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "937952 Examples seen. Accuracy:0.9340 Error: 0.23360 Loss:0.23271 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.70s\n", - "938592 Examples seen. Accuracy:0.9349 Error: 0.09053 Loss:0.06425 Threads: 8 Forward time: 4.92s Backward time: 3.28s Step time: 3.69s\n", - "939232 Examples seen. Accuracy:0.9342 Error: 0.24665 Loss:0.18591 Threads: 8 Forward time: 4.98s Backward time: 3.32s Step time: 3.71s\n", - "939872 Examples seen. Accuracy:0.9354 Error: 0.10677 Loss:0.10166 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.67s\n", - "940512 Examples seen. Accuracy:0.9350 Error: 0.17089 Loss:0.12212 Threads: 8 Forward time: 5.01s Backward time: 3.30s Step time: 3.69s\n", - "941152 Examples seen. Accuracy:0.9347 Error: 0.19117 Loss:0.16232 Threads: 8 Forward time: 5.02s Backward time: 3.29s Step time: 3.69s\n", - "941792 Examples seen. Accuracy:0.9345 Error: 0.11992 Loss:0.10492 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.69s\n", - "942432 Examples seen. Accuracy:0.9342 Error: 0.23129 Loss:0.26354 Threads: 8 Forward time: 5.03s Backward time: 3.30s Step time: 3.69s\n", - "943072 Examples seen. Accuracy:0.9346 Error: 0.14941 Loss:0.20970 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.72s\n", - "943712 Examples seen. Accuracy:0.9344 Error: 0.11692 Loss:0.09496 Threads: 8 Forward time: 5.06s Backward time: 3.31s Step time: 4.26s\n", - "944352 Examples seen. Accuracy:0.9355 Error: 0.15168 Loss:0.13723 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.66s\n", - "944992 Examples seen. Accuracy:0.9351 Error: 0.16192 Loss:0.19788 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.65s\n", - "945632 Examples seen. 
Accuracy:0.9350 Error: 0.17196 Loss:0.22656 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.66s\n", - "946272 Examples seen. Accuracy:0.9350 Error: 0.18142 Loss:0.19020 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.68s\n", - "946912 Examples seen. Accuracy:0.9342 Error: 0.14163 Loss:0.12894 Threads: 8 Forward time: 4.91s Backward time: 3.30s Step time: 3.68s\n", - "947552 Examples seen. Accuracy:0.9339 Error: 0.22605 Loss:0.20335 Threads: 8 Forward time: 4.90s Backward time: 3.29s Step time: 3.61s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 19 Examples seen:948176 Validation Accuracy: 0.9728 Validation Error: 0.0752 Validation Loss: 0.0796 Total time: 104.86min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.8 hours.\n", - "Epochs: 19. Working time: 1.75 hours.\n", - "948816 Examples seen. Accuracy:0.9337 Error: 0.08842 Loss:0.05630 Threads: 8 Forward time: 4.89s Backward time: 3.29s Step time: 3.64s\n", - "949456 Examples seen. Accuracy:0.9335 Error: 0.14330 Loss:0.09962 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.64s\n", - "950096 Examples seen. Accuracy:0.9330 Error: 0.09488 Loss:0.06687 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.64s\n", - "950736 Examples seen. Accuracy:0.9318 Error: 0.17886 Loss:0.12939 Threads: 8 Forward time: 4.92s Backward time: 3.27s Step time: 3.68s\n", - "951376 Examples seen. Accuracy:0.9325 Error: 0.11387 Loss:0.09973 Threads: 8 Forward time: 4.90s Backward time: 3.28s Step time: 3.62s\n", - "952016 Examples seen. Accuracy:0.9331 Error: 0.13560 Loss:0.10236 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.62s\n", - "952656 Examples seen. Accuracy:0.9338 Error: 0.14782 Loss:0.10988 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.63s\n", - "953296 Examples seen. 
Accuracy:0.9346 Error: 0.12662 Loss:0.09441 Threads: 8 Forward time: 4.91s Backward time: 3.28s Step time: 3.61s\n", - "953936 Examples seen. Accuracy:0.9348 Error: 0.20488 Loss:0.22021 Threads: 8 Forward time: 4.91s Backward time: 3.26s Step time: 3.61s\n", - "954576 Examples seen. Accuracy:0.9339 Error: 0.09196 Loss:0.07950 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.63s\n", - "955216 Examples seen. Accuracy:0.9325 Error: 0.10137 Loss:0.07042 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.67s\n", - "955856 Examples seen. Accuracy:0.9318 Error: 0.11629 Loss:0.14477 Threads: 8 Forward time: 4.91s Backward time: 3.26s Step time: 3.61s\n", - "956496 Examples seen. Accuracy:0.9322 Error: 0.09635 Loss:0.08045 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.61s\n", - "957136 Examples seen. Accuracy:0.9296 Error: 0.21215 Loss:0.33157 Threads: 8 Forward time: 4.90s Backward time: 3.27s Step time: 3.60s\n", - "957776 Examples seen. Accuracy:0.9294 Error: 0.26400 Loss:0.29434 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.61s\n", - "958416 Examples seen. Accuracy:0.9298 Error: 0.23262 Loss:0.32465 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.63s\n", - "959056 Examples seen. Accuracy:0.9296 Error: 0.25139 Loss:0.25538 Threads: 8 Forward time: 4.99s Backward time: 3.30s Step time: 3.65s\n", - "959696 Examples seen. Accuracy:0.9308 Error: 0.07061 Loss:0.04188 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.76s\n", - "960336 Examples seen. Accuracy:0.9303 Error: 0.14351 Loss:0.11191 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.69s\n", - "960976 Examples seen. Accuracy:0.9314 Error: 0.13034 Loss:0.17459 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.72s\n", - "961616 Examples seen. Accuracy:0.9317 Error: 0.18498 Loss:0.15089 Threads: 8 Forward time: 5.01s Backward time: 3.30s Step time: 3.68s\n", - "962256 Examples seen. 
Accuracy:0.9319 Error: 0.15155 Loss:0.11495 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.72s\n", - "962896 Examples seen. Accuracy:0.9314 Error: 0.17474 Loss:0.20256 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.66s\n", - "963536 Examples seen. Accuracy:0.9327 Error: 0.09224 Loss:0.06967 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.65s\n", - "964176 Examples seen. Accuracy:0.9336 Error: 0.15469 Loss:0.12259 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.64s\n", - "964816 Examples seen. Accuracy:0.9345 Error: 0.16874 Loss:0.16323 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.69s\n", - "965456 Examples seen. Accuracy:0.9330 Error: 0.21291 Loss:0.26617 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.70s\n", - "966096 Examples seen. Accuracy:0.9335 Error: 0.07029 Loss:0.04219 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.72s\n", - "966736 Examples seen. Accuracy:0.9343 Error: 0.12966 Loss:0.08937 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.71s\n", - "967376 Examples seen. Accuracy:0.9351 Error: 0.11018 Loss:0.11500 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.69s\n", - "968016 Examples seen. Accuracy:0.9369 Error: 0.14996 Loss:0.13956 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.72s\n", - "968656 Examples seen. Accuracy:0.9377 Error: 0.13940 Loss:0.11225 Threads: 8 Forward time: 7.83s Backward time: 4.96s Step time: 4.19s\n", - "969296 Examples seen. Accuracy:0.9374 Error: 0.12336 Loss:0.15393 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.73s\n", - "969936 Examples seen. Accuracy:0.9383 Error: 0.16669 Loss:0.13981 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.68s\n", - "970576 Examples seen. Accuracy:0.9370 Error: 0.15209 Loss:0.11699 Threads: 8 Forward time: 5.18s Backward time: 3.35s Step time: 3.73s\n", - "971216 Examples seen. 
Accuracy:0.9369 Error: 0.14281 Loss:0.14358 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.66s\n", - "971856 Examples seen. Accuracy:0.9376 Error: 0.10629 Loss:0.10674 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.74s\n", - "972496 Examples seen. Accuracy:0.9380 Error: 0.17030 Loss:0.21394 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.73s\n", - "973136 Examples seen. Accuracy:0.9364 Error: 0.20765 Loss:0.19736 Threads: 8 Forward time: 5.01s Backward time: 3.30s Step time: 3.71s\n", - "973776 Examples seen. Accuracy:0.9386 Error: 0.11261 Loss:0.07438 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.66s\n", - "974416 Examples seen. Accuracy:0.9400 Error: 0.06419 Loss:0.03617 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.66s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "975056 Examples seen. Accuracy:0.9402 Error: 0.16715 Loss:0.20872 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 3.66s\n", - "975696 Examples seen. Accuracy:0.9413 Error: 0.11517 Loss:0.07074 Threads: 8 Forward time: 4.91s Backward time: 3.31s Step time: 3.65s\n", - "976336 Examples seen. Accuracy:0.9396 Error: 0.16187 Loss:0.13369 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.66s\n", - "976976 Examples seen. Accuracy:0.9398 Error: 0.12819 Loss:0.15170 Threads: 8 Forward time: 4.89s Backward time: 3.24s Step time: 3.64s\n", - "977616 Examples seen. Accuracy:0.9382 Error: 0.22360 Loss:0.22564 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.68s\n", - "978256 Examples seen. Accuracy:0.9357 Error: 0.14552 Loss:0.14895 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.63s\n", - "978896 Examples seen. Accuracy:0.9350 Error: 0.17976 Loss:0.15143 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.66s\n", - "979536 Examples seen. 
Accuracy:0.9356 Error: 0.12707 Loss:0.11543 Threads: 8 Forward time: 4.91s Backward time: 3.27s Step time: 3.63s\n", - "980176 Examples seen. Accuracy:0.9363 Error: 0.18955 Loss:0.20754 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.62s\n", - "980816 Examples seen. Accuracy:0.9381 Error: 0.30464 Loss:0.38584 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.61s\n", - "981456 Examples seen. Accuracy:0.9382 Error: 0.10138 Loss:0.09692 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.60s\n", - "982096 Examples seen. Accuracy:0.9397 Error: 0.18930 Loss:0.14706 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.60s\n", - "982736 Examples seen. Accuracy:0.9392 Error: 0.15525 Loss:0.16459 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.60s\n", - "983376 Examples seen. Accuracy:0.9389 Error: 0.17220 Loss:0.19944 Threads: 8 Forward time: 4.90s Backward time: 3.27s Step time: 3.61s\n", - "984016 Examples seen. Accuracy:0.9375 Error: 0.13405 Loss:0.11980 Threads: 8 Forward time: 4.91s Backward time: 3.25s Step time: 3.63s\n", - "984656 Examples seen. Accuracy:0.9381 Error: 0.23697 Loss:0.39276 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.61s\n", - "985296 Examples seen. Accuracy:0.9386 Error: 0.17989 Loss:0.19990 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.62s\n", - "985936 Examples seen. Accuracy:0.9391 Error: 0.19084 Loss:0.17228 Threads: 8 Forward time: 4.91s Backward time: 3.29s Step time: 3.61s\n", - "986576 Examples seen. Accuracy:0.9397 Error: 0.12895 Loss:0.14391 Threads: 8 Forward time: 4.95s Backward time: 3.30s Step time: 3.65s\n", - "987216 Examples seen. Accuracy:0.9412 Error: 0.09183 Loss:0.07460 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.62s\n", - "987856 Examples seen. Accuracy:0.9402 Error: 0.24106 Loss:0.31260 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.62s\n", - "988496 Examples seen. 
Accuracy:0.9398 Error: 0.27409 Loss:0.36580 Threads: 8 Forward time: 4.92s Backward time: 3.27s Step time: 3.61s\n", - "989136 Examples seen. Accuracy:0.9399 Error: 0.19434 Loss:0.14114 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.62s\n", - "989776 Examples seen. Accuracy:0.9397 Error: 0.18174 Loss:0.21309 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.62s\n", - "990416 Examples seen. Accuracy:0.9383 Error: 0.17824 Loss:0.16778 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.64s\n", - "991056 Examples seen. Accuracy:0.9383 Error: 0.16129 Loss:0.20397 Threads: 8 Forward time: 5.01s Backward time: 3.27s Step time: 3.67s\n", - "991696 Examples seen. Accuracy:0.9375 Error: 0.22722 Loss:0.18484 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.65s\n", - "992336 Examples seen. Accuracy:0.9355 Error: 0.09220 Loss:0.06090 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.65s\n", - "992976 Examples seen. Accuracy:0.9365 Error: 0.13139 Loss:0.14182 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.64s\n", - "993616 Examples seen. Accuracy:0.9374 Error: 0.10060 Loss:0.06814 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.63s\n", - "994256 Examples seen. Accuracy:0.9379 Error: 0.20826 Loss:0.28773 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.69s\n", - "994896 Examples seen. Accuracy:0.9384 Error: 0.15047 Loss:0.20643 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.62s\n", - "995536 Examples seen. Accuracy:0.9381 Error: 0.11685 Loss:0.11920 Threads: 8 Forward time: 4.99s Backward time: 3.30s Step time: 3.65s\n", - "996176 Examples seen. Accuracy:0.9391 Error: 0.10343 Loss:0.09796 Threads: 8 Forward time: 5.05s Backward time: 3.27s Step time: 3.65s\n", - "996816 Examples seen. Accuracy:0.9393 Error: 0.12977 Loss:0.14622 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.68s\n", - "997456 Examples seen. 
Accuracy:0.9386 Error: 0.10276 Loss:0.06828 Threads: 8 Forward time: 4.95s Backward time: 3.35s Step time: 3.70s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 20 Examples seen:998080 Validation Accuracy: 0.9754 Validation Error: 0.0716 Validation Loss: 0.0771 Total time: 110.10min\n", - "Starting Testing.\n", - "Epochs: 20 Examples seen:998080 Test Accuracy: 0.9819 Test Error: 0.0635 Test Loss: 0.0574 Total time: 110.57min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 20. Working time: 1.84 hours.\n", - "Learning rate set to:0.00082\n", - "998720 Examples seen. Accuracy:0.9380 Error: 0.15606 Loss:0.11226 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.69s\n", - "999360 Examples seen. Accuracy:0.9380 Error: 0.17582 Loss:0.20992 Threads: 8 Forward time: 5.02s Backward time: 3.33s Step time: 3.66s\n", - "1000000 Examples seen. Accuracy:0.9390 Error: 0.16061 Loss:0.11689 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.67s\n", - "1000640 Examples seen. Accuracy:0.9385 Error: 0.18791 Loss:0.21620 Threads: 8 Forward time: 4.92s Backward time: 3.30s Step time: 3.64s\n", - "1001280 Examples seen. Accuracy:0.9387 Error: 0.15905 Loss:0.11588 Threads: 8 Forward time: 5.35s Backward time: 3.57s Step time: 5.28s\n", - "1001920 Examples seen. Accuracy:0.9381 Error: 0.12402 Loss:0.09285 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 4.23s\n", - "1002560 Examples seen. Accuracy:0.9380 Error: 0.20191 Loss:0.21335 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.72s\n", - "1003200 Examples seen. Accuracy:0.9387 Error: 0.14123 Loss:0.25109 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.74s\n", - "1003840 Examples seen. Accuracy:0.9368 Error: 0.31502 Loss:0.49339 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.66s\n", - "1004480 Examples seen. 
Accuracy:0.9373 Error: 0.05675 Loss:0.04700 Threads: 8 Forward time: 5.02s Backward time: 3.31s Step time: 3.71s\n", - "1005120 Examples seen. Accuracy:0.9370 Error: 0.15868 Loss:0.16694 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.72s\n", - "1005760 Examples seen. Accuracy:0.9374 Error: 0.24801 Loss:0.42999 Threads: 8 Forward time: 5.03s Backward time: 3.29s Step time: 3.70s\n", - "1006400 Examples seen. Accuracy:0.9394 Error: 0.14046 Loss:0.10851 Threads: 8 Forward time: 4.99s Backward time: 3.34s Step time: 3.70s\n", - "1007040 Examples seen. Accuracy:0.9394 Error: 0.10072 Loss:0.10989 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 3.68s\n", - "1007680 Examples seen. Accuracy:0.9389 Error: 0.19668 Loss:0.20501 Threads: 8 Forward time: 5.02s Backward time: 3.33s Step time: 3.66s\n", - "1008320 Examples seen. Accuracy:0.9388 Error: 0.12827 Loss:0.13005 Threads: 8 Forward time: 4.95s Backward time: 3.30s Step time: 3.69s\n", - "1008960 Examples seen. Accuracy:0.9397 Error: 0.24293 Loss:0.41236 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.66s\n", - "1009600 Examples seen. Accuracy:0.9396 Error: 0.10360 Loss:0.07998 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.65s\n", - "1010240 Examples seen. Accuracy:0.9385 Error: 0.12878 Loss:0.18624 Threads: 8 Forward time: 5.04s Backward time: 3.30s Step time: 3.76s\n", - "1010880 Examples seen. Accuracy:0.9391 Error: 0.15860 Loss:0.14407 Threads: 8 Forward time: 5.14s Backward time: 3.29s Step time: 3.71s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1011520 Examples seen. Accuracy:0.9406 Error: 0.10252 Loss:0.07616 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.68s\n", - "1012160 Examples seen. Accuracy:0.9400 Error: 0.17599 Loss:0.17918 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.66s\n", - "1012800 Examples seen. 
Accuracy:0.9402 Error: 0.07600 Loss:0.05283 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.65s\n", - "1013440 Examples seen. Accuracy:0.9412 Error: 0.13442 Loss:0.11871 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.65s\n", - "1014080 Examples seen. Accuracy:0.9410 Error: 0.15921 Loss:0.14679 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.65s\n", - "1014720 Examples seen. Accuracy:0.9400 Error: 0.12354 Loss:0.15243 Threads: 8 Forward time: 5.04s Backward time: 3.29s Step time: 3.68s\n", - "1015360 Examples seen. Accuracy:0.9400 Error: 0.12499 Loss:0.09048 Threads: 8 Forward time: 5.01s Backward time: 3.30s Step time: 3.71s\n", - "1016000 Examples seen. Accuracy:0.9417 Error: 0.13423 Loss:0.14851 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.75s\n", - "1016640 Examples seen. Accuracy:0.9416 Error: 0.24924 Loss:0.25736 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.69s\n", - "1017280 Examples seen. Accuracy:0.9422 Error: 0.08832 Loss:0.05413 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.69s\n", - "1017920 Examples seen. Accuracy:0.9409 Error: 0.18979 Loss:0.19725 Threads: 8 Forward time: 4.97s Backward time: 3.27s Step time: 3.71s\n", - "1018560 Examples seen. Accuracy:0.9401 Error: 0.20400 Loss:0.22846 Threads: 8 Forward time: 5.05s Backward time: 3.34s Step time: 3.75s\n", - "1019200 Examples seen. Accuracy:0.9398 Error: 0.19464 Loss:0.16639 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 4.29s\n", - "1019840 Examples seen. Accuracy:0.9395 Error: 0.20691 Loss:0.21282 Threads: 8 Forward time: 4.92s Backward time: 3.28s Step time: 3.72s\n", - "1020480 Examples seen. Accuracy:0.9421 Error: 0.11174 Loss:0.10274 Threads: 8 Forward time: 4.99s Backward time: 3.32s Step time: 3.67s\n", - "1021120 Examples seen. 
Accuracy:0.9417 Error: 0.13866 Loss:0.15672 Threads: 8 Forward time: 4.91s Backward time: 3.29s Step time: 3.68s\n", - "1021760 Examples seen. Accuracy:0.9409 Error: 0.11041 Loss:0.15807 Threads: 8 Forward time: 4.92s Backward time: 3.28s Step time: 3.66s\n", - "1022400 Examples seen. Accuracy:0.9403 Error: 0.14476 Loss:0.13817 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.67s\n", - "1023040 Examples seen. Accuracy:0.9405 Error: 0.09324 Loss:0.07487 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.67s\n", - "1023680 Examples seen. Accuracy:0.9419 Error: 0.09300 Loss:0.05732 Threads: 8 Forward time: 4.98s Backward time: 3.33s Step time: 3.68s\n", - "1024320 Examples seen. Accuracy:0.9409 Error: 0.11892 Loss:0.08042 Threads: 8 Forward time: 4.94s Backward time: 3.30s Step time: 3.67s\n", - "1024960 Examples seen. Accuracy:0.9409 Error: 0.17977 Loss:0.14857 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.70s\n", - "1025600 Examples seen. Accuracy:0.9399 Error: 0.10192 Loss:0.07094 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.65s\n", - "1026240 Examples seen. Accuracy:0.9409 Error: 0.20096 Loss:0.26709 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.65s\n", - "1026880 Examples seen. Accuracy:0.9423 Error: 0.19891 Loss:0.22236 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.66s\n", - "1027520 Examples seen. Accuracy:0.9417 Error: 0.14905 Loss:0.14301 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.67s\n", - "1028160 Examples seen. Accuracy:0.9421 Error: 0.11452 Loss:0.07844 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.68s\n", - "1028800 Examples seen. Accuracy:0.9419 Error: 0.21365 Loss:0.28618 Threads: 8 Forward time: 4.92s Backward time: 3.30s Step time: 3.68s\n", - "1029440 Examples seen. 
Accuracy:0.9402 Error: 0.21966 Loss:0.26615 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.69s\n", - "1030080 Examples seen. Accuracy:0.9408 Error: 0.16668 Loss:0.15809 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.66s\n", - "1030720 Examples seen. Accuracy:0.9412 Error: 0.06505 Loss:0.03781 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.67s\n", - "1031360 Examples seen. Accuracy:0.9430 Error: 0.12582 Loss:0.14228 Threads: 8 Forward time: 4.95s Backward time: 3.28s Step time: 3.68s\n", - "1032000 Examples seen. Accuracy:0.9438 Error: 0.08969 Loss:0.06734 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.68s\n", - "1032640 Examples seen. Accuracy:0.9432 Error: 0.16696 Loss:0.17333 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.68s\n", - "1033280 Examples seen. Accuracy:0.9434 Error: 0.18620 Loss:0.21042 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.67s\n", - "1033920 Examples seen. Accuracy:0.9434 Error: 0.23867 Loss:0.19954 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.66s\n", - "1034560 Examples seen. Accuracy:0.9428 Error: 0.15853 Loss:0.15873 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.68s\n", - "1035200 Examples seen. Accuracy:0.9413 Error: 0.16289 Loss:0.14258 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.67s\n", - "1035840 Examples seen. Accuracy:0.9406 Error: 0.21517 Loss:0.25260 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.69s\n", - "1036480 Examples seen. Accuracy:0.9417 Error: 0.16048 Loss:0.15307 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.66s\n", - "1037120 Examples seen. Accuracy:0.9418 Error: 0.28925 Loss:0.31492 Threads: 8 Forward time: 4.93s Backward time: 3.27s Step time: 3.66s\n", - "1037760 Examples seen. 
Accuracy:0.9411 Error: 0.19507 Loss:0.16127 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.70s\n", - "1038400 Examples seen. Accuracy:0.9401 Error: 0.10647 Loss:0.16149 Threads: 8 Forward time: 4.92s Backward time: 3.28s Step time: 3.65s\n", - "1039040 Examples seen. Accuracy:0.9404 Error: 0.22006 Loss:0.20029 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.72s\n", - "1039680 Examples seen. Accuracy:0.9391 Error: 0.25526 Loss:0.39782 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.70s\n", - "1040320 Examples seen. Accuracy:0.9391 Error: 0.08865 Loss:0.06161 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.75s\n", - "1040960 Examples seen. Accuracy:0.9401 Error: 0.15010 Loss:0.13009 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.75s\n", - "1041600 Examples seen. Accuracy:0.9397 Error: 0.16926 Loss:0.15399 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.69s\n", - "1042240 Examples seen. Accuracy:0.9407 Error: 0.11042 Loss:0.13679 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.72s\n", - "1042880 Examples seen. Accuracy:0.9402 Error: 0.08517 Loss:0.05677 Threads: 8 Forward time: 5.03s Backward time: 3.30s Step time: 3.70s\n", - "1043520 Examples seen. Accuracy:0.9399 Error: 0.15405 Loss:0.21766 Threads: 8 Forward time: 5.01s Backward time: 3.32s Step time: 3.76s\n", - "1044160 Examples seen. Accuracy:0.9399 Error: 0.12258 Loss:0.11280 Threads: 8 Forward time: 4.95s Backward time: 3.33s Step time: 3.76s\n", - "1044800 Examples seen. Accuracy:0.9390 Error: 0.14773 Loss:0.17697 Threads: 8 Forward time: 5.09s Backward time: 3.32s Step time: 3.80s\n", - "1045440 Examples seen. Accuracy:0.9389 Error: 0.14281 Loss:0.16803 Threads: 8 Forward time: 5.05s Backward time: 3.31s Step time: 3.73s\n", - "1046080 Examples seen. 
Accuracy:0.9407 Error: 0.12623 Loss:0.12138 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.72s\n", - "1046720 Examples seen. Accuracy:0.9402 Error: 0.10736 Loss:0.13168 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.81s\n", - "1047360 Examples seen. Accuracy:0.9409 Error: 0.07710 Loss:0.04530 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.80s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 21 Examples seen:1047984 Validation Accuracy: 0.9757 Validation Error: 0.0693 Validation Loss: 0.0751 Total time: 115.90min\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.2 hours.\n", - "Epochs: 21. Working time: 1.93 hours.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1048624 Examples seen. Accuracy:0.9416 Error: 0.22140 Loss:0.27706 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.73s\n", - "1049264 Examples seen. Accuracy:0.9417 Error: 0.07956 Loss:0.04851 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.70s\n", - "1049904 Examples seen. Accuracy:0.9418 Error: 0.13258 Loss:0.09884 Threads: 8 Forward time: 4.94s Backward time: 3.31s Step time: 3.72s\n", - "1050544 Examples seen. Accuracy:0.9417 Error: 0.08471 Loss:0.05805 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.75s\n", - "1051184 Examples seen. Accuracy:0.9405 Error: 0.16332 Loss:0.15157 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 3.72s\n", - "1051824 Examples seen. Accuracy:0.9418 Error: 0.09365 Loss:0.08231 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.72s\n", - "1052464 Examples seen. Accuracy:0.9430 Error: 0.12558 Loss:0.08224 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.72s\n", - "1053104 Examples seen. Accuracy:0.9451 Error: 0.05609 Loss:0.07770 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.67s\n", - "1053744 Examples seen. 
Accuracy:0.9462 Error: 0.06936 Loss:0.04095 Threads: 8 Forward time: 4.91s Backward time: 3.27s Step time: 3.67s\n", - "1054384 Examples seen. Accuracy:0.9464 Error: 0.17336 Loss:0.16978 Threads: 8 Forward time: 4.94s Backward time: 3.30s Step time: 3.67s\n", - "1055024 Examples seen. Accuracy:0.9467 Error: 0.07723 Loss:0.05837 Threads: 8 Forward time: 4.94s Backward time: 3.30s Step time: 3.72s\n", - "1055664 Examples seen. Accuracy:0.9461 Error: 0.09990 Loss:0.06956 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.66s\n", - "1056304 Examples seen. Accuracy:0.9462 Error: 0.17575 Loss:0.17882 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.66s\n", - "1056944 Examples seen. Accuracy:0.9452 Error: 0.10842 Loss:0.08968 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.68s\n", - "1057584 Examples seen. Accuracy:0.9447 Error: 0.23762 Loss:0.23375 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.70s\n", - "1058224 Examples seen. Accuracy:0.9453 Error: 0.09969 Loss:0.11526 Threads: 8 Forward time: 4.92s Backward time: 3.33s Step time: 3.69s\n", - "1058864 Examples seen. Accuracy:0.9457 Error: 0.14085 Loss:0.09528 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.72s\n", - "1059504 Examples seen. Accuracy:0.9451 Error: 0.09112 Loss:0.06185 Threads: 8 Forward time: 4.94s Backward time: 3.32s Step time: 3.67s\n", - "1060144 Examples seen. Accuracy:0.9457 Error: 0.17827 Loss:0.14106 Threads: 8 Forward time: 4.94s Backward time: 3.31s Step time: 3.68s\n", - "1060784 Examples seen. Accuracy:0.9452 Error: 0.14503 Loss:0.15594 Threads: 8 Forward time: 4.93s Backward time: 3.30s Step time: 3.67s\n", - "1061424 Examples seen. Accuracy:0.9423 Error: 0.16498 Loss:0.25695 Threads: 8 Forward time: 5.01s Backward time: 3.29s Step time: 3.70s\n", - "1062064 Examples seen. 
Accuracy:0.9404 Error: 0.16148 Loss:0.15608 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.67s\n", - "1062704 Examples seen. Accuracy:0.9398 Error: 0.20150 Loss:0.26722 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.70s\n", - "1063344 Examples seen. Accuracy:0.9389 Error: 0.19005 Loss:0.21478 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.65s\n", - "1063984 Examples seen. Accuracy:0.9382 Error: 0.19322 Loss:0.20439 Threads: 8 Forward time: 4.92s Backward time: 3.28s Step time: 3.71s\n", - "1064624 Examples seen. Accuracy:0.9388 Error: 0.26689 Loss:0.28840 Threads: 8 Forward time: 5.06s Backward time: 3.30s Step time: 3.71s\n", - "1065264 Examples seen. Accuracy:0.9407 Error: 0.12291 Loss:0.10567 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 3.73s\n", - "1065904 Examples seen. Accuracy:0.9408 Error: 0.19481 Loss:0.15827 Threads: 8 Forward time: 4.97s Backward time: 3.27s Step time: 3.69s\n", - "1066544 Examples seen. Accuracy:0.9393 Error: 0.05947 Loss:0.03649 Threads: 8 Forward time: 5.03s Backward time: 3.32s Step time: 3.74s\n", - "1067184 Examples seen. Accuracy:0.9397 Error: 0.17932 Loss:0.18402 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.70s\n", - "1067824 Examples seen. Accuracy:0.9417 Error: 0.16040 Loss:0.11998 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.72s\n", - "1068464 Examples seen. Accuracy:0.9419 Error: 0.15005 Loss:0.10680 Threads: 8 Forward time: 5.02s Backward time: 3.32s Step time: 3.66s\n", - "1069104 Examples seen. Accuracy:0.9416 Error: 0.14459 Loss:0.12605 Threads: 8 Forward time: 5.04s Backward time: 3.33s Step time: 3.68s\n", - "1069744 Examples seen. Accuracy:0.9419 Error: 0.16791 Loss:0.17241 Threads: 8 Forward time: 5.08s Backward time: 3.31s Step time: 3.76s\n", - "1070384 Examples seen. 
Accuracy:0.9413 Error: 0.17009 Loss:0.16272 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.68s\n", - "1071024 Examples seen. Accuracy:0.9409 Error: 0.11426 Loss:0.08304 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.68s\n", - "1071664 Examples seen. Accuracy:0.9405 Error: 0.20175 Loss:0.24715 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.72s\n", - "1072304 Examples seen. Accuracy:0.9399 Error: 0.21003 Loss:0.19355 Threads: 8 Forward time: 5.02s Backward time: 3.37s Step time: 3.73s\n", - "1072944 Examples seen. Accuracy:0.9400 Error: 0.19522 Loss:0.14412 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 4.29s\n", - "1073584 Examples seen. Accuracy:0.9417 Error: 0.15198 Loss:0.14219 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.71s\n", - "1074224 Examples seen. Accuracy:0.9408 Error: 0.10407 Loss:0.13263 Threads: 8 Forward time: 4.97s Backward time: 3.32s Step time: 3.70s\n", - "1074864 Examples seen. Accuracy:0.9415 Error: 0.17778 Loss:0.16864 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.70s\n", - "1075504 Examples seen. Accuracy:0.9425 Error: 0.13496 Loss:0.14462 Threads: 8 Forward time: 5.11s Backward time: 3.40s Step time: 3.79s\n", - "1076144 Examples seen. Accuracy:0.9425 Error: 0.09835 Loss:0.11468 Threads: 8 Forward time: 5.04s Backward time: 3.33s Step time: 3.72s\n", - "1076784 Examples seen. Accuracy:0.9425 Error: 0.18631 Loss:0.16566 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.70s\n", - "1077424 Examples seen. Accuracy:0.9419 Error: 0.21663 Loss:0.27832 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.70s\n", - "1078064 Examples seen. Accuracy:0.9421 Error: 0.15323 Loss:0.11182 Threads: 8 Forward time: 5.09s Backward time: 3.33s Step time: 3.73s\n", - "1078704 Examples seen. 
Accuracy:0.9418 Error: 0.16705 Loss:0.18876 Threads: 8 Forward time: 5.30s Backward time: 3.41s Step time: 3.79s\n", - "1079344 Examples seen. Accuracy:0.9407 Error: 0.17315 Loss:0.16612 Threads: 8 Forward time: 4.95s Backward time: 3.30s Step time: 3.72s\n", - "1079984 Examples seen. Accuracy:0.9407 Error: 0.19254 Loss:0.17098 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.75s\n", - "1080624 Examples seen. Accuracy:0.9406 Error: 0.17402 Loss:0.15303 Threads: 8 Forward time: 4.91s Backward time: 3.26s Step time: 3.69s\n", - "1081264 Examples seen. Accuracy:0.9397 Error: 0.12523 Loss:0.11633 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.68s\n", - "1081904 Examples seen. Accuracy:0.9395 Error: 0.13804 Loss:0.10574 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.68s\n", - "1082544 Examples seen. Accuracy:0.9402 Error: 0.08202 Loss:0.05414 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.71s\n", - "1083184 Examples seen. Accuracy:0.9404 Error: 0.10537 Loss:0.07908 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.70s\n", - "1083824 Examples seen. Accuracy:0.9416 Error: 0.14569 Loss:0.10550 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.67s\n", - "1084464 Examples seen. Accuracy:0.9412 Error: 0.17179 Loss:0.15162 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.66s\n", - "1085104 Examples seen. Accuracy:0.9409 Error: 0.12321 Loss:0.11928 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.67s\n", - "1085744 Examples seen. Accuracy:0.9412 Error: 0.14831 Loss:0.15116 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.69s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1086384 Examples seen. Accuracy:0.9416 Error: 0.12226 Loss:0.09902 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.75s\n", - "1087024 Examples seen. 
Accuracy:0.9425 Error: 0.08110 Loss:0.05480 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.68s\n", - "1087664 Examples seen. Accuracy:0.9420 Error: 0.14540 Loss:0.12781 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.68s\n", - "1088304 Examples seen. Accuracy:0.9429 Error: 0.09252 Loss:0.08647 Threads: 8 Forward time: 5.01s Backward time: 3.30s Step time: 3.71s\n", - "1088944 Examples seen. Accuracy:0.9429 Error: 0.12294 Loss:0.13806 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.71s\n", - "1089584 Examples seen. Accuracy:0.9441 Error: 0.09385 Loss:0.06357 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.72s\n", - "1090224 Examples seen. Accuracy:0.9422 Error: 0.18190 Loss:0.17900 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.72s\n", - "1090864 Examples seen. Accuracy:0.9427 Error: 0.17771 Loss:0.14552 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.72s\n", - "1091504 Examples seen. Accuracy:0.9449 Error: 0.06497 Loss:0.07365 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.71s\n", - "1092144 Examples seen. Accuracy:0.9442 Error: 0.19330 Loss:0.20501 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.66s\n", - "1092784 Examples seen. Accuracy:0.9436 Error: 0.22128 Loss:0.22584 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.66s\n", - "1093424 Examples seen. Accuracy:0.9420 Error: 0.11000 Loss:0.09795 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.66s\n", - "1094064 Examples seen. Accuracy:0.9423 Error: 0.11610 Loss:0.10722 Threads: 8 Forward time: 5.09s Backward time: 3.32s Step time: 3.71s\n", - "1094704 Examples seen. Accuracy:0.9427 Error: 0.14846 Loss:0.13134 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.69s\n", - "1095344 Examples seen. 
Accuracy:0.9412 Error: 0.15211 Loss:0.13793 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.67s\n", - "1095984 Examples seen. Accuracy:0.9411 Error: 0.14959 Loss:0.13270 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.67s\n", - "1096624 Examples seen. Accuracy:0.9424 Error: 0.13953 Loss:0.11018 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.68s\n", - "1097264 Examples seen. Accuracy:0.9425 Error: 0.12376 Loss:0.12334 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.74s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 22 Examples seen:1097888 Validation Accuracy: 0.9783 Validation Error: 0.0654 Validation Loss: 0.0716 Total time: 121.21min\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.1 hours.\n", - "Epochs: 22. Working time: 2.02 hours.\n", - "1098528 Examples seen. Accuracy:0.9423 Error: 0.09284 Loss:0.06167 Threads: 8 Forward time: 5.02s Backward time: 3.32s Step time: 3.73s\n", - "1099168 Examples seen. Accuracy:0.9438 Error: 0.08001 Loss:0.14173 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.68s\n", - "1099808 Examples seen. Accuracy:0.9434 Error: 0.22561 Loss:0.27127 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.63s\n", - "1100448 Examples seen. Accuracy:0.9434 Error: 0.23495 Loss:0.20568 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.65s\n", - "1101088 Examples seen. Accuracy:0.9433 Error: 0.11784 Loss:0.11380 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.64s\n", - "1101728 Examples seen. Accuracy:0.9425 Error: 0.18355 Loss:0.19803 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.66s\n", - "1102368 Examples seen. Accuracy:0.9425 Error: 0.23640 Loss:0.22638 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.63s\n", - "1103008 Examples seen. 
Accuracy:0.9429 Error: 0.11586 Loss:0.10133 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.67s\n", - "1103648 Examples seen. Accuracy:0.9439 Error: 0.10770 Loss:0.17222 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.70s\n", - "1104288 Examples seen. Accuracy:0.9431 Error: 0.17562 Loss:0.22074 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.71s\n", - "1104928 Examples seen. Accuracy:0.9429 Error: 0.17727 Loss:0.21970 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.64s\n", - "1105568 Examples seen. Accuracy:0.9419 Error: 0.14166 Loss:0.14599 Threads: 8 Forward time: 5.21s Backward time: 3.32s Step time: 3.80s\n", - "1106208 Examples seen. Accuracy:0.9432 Error: 0.11440 Loss:0.08004 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.70s\n", - "1106848 Examples seen. Accuracy:0.9439 Error: 0.12359 Loss:0.12816 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.69s\n", - "1107488 Examples seen. Accuracy:0.9441 Error: 0.20007 Loss:0.19580 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.69s\n", - "1108128 Examples seen. Accuracy:0.9457 Error: 0.18324 Loss:0.20063 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.68s\n", - "1108768 Examples seen. Accuracy:0.9462 Error: 0.20224 Loss:0.18220 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.70s\n", - "1109408 Examples seen. Accuracy:0.9464 Error: 0.09220 Loss:0.08859 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.68s\n", - "1110048 Examples seen. Accuracy:0.9459 Error: 0.13530 Loss:0.10695 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.70s\n", - "1110688 Examples seen. Accuracy:0.9443 Error: 0.15863 Loss:0.13609 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.69s\n", - "1111328 Examples seen. 
Accuracy:0.9436 Error: 0.18403 Loss:0.17732 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.68s\n", - "1111968 Examples seen. Accuracy:0.9424 Error: 0.23619 Loss:0.26713 Threads: 8 Forward time: 5.04s Backward time: 3.31s Step time: 3.71s\n", - "1112608 Examples seen. Accuracy:0.9416 Error: 0.09557 Loss:0.07480 Threads: 8 Forward time: 4.95s Backward time: 3.29s Step time: 3.68s\n", - "1113248 Examples seen. Accuracy:0.9417 Error: 0.20328 Loss:0.22807 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.71s\n", - "1113888 Examples seen. Accuracy:0.9417 Error: 0.13156 Loss:0.16044 Threads: 8 Forward time: 5.05s Backward time: 3.31s Step time: 3.71s\n", - "1114528 Examples seen. Accuracy:0.9428 Error: 0.17281 Loss:0.16819 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.67s\n", - "1115168 Examples seen. Accuracy:0.9429 Error: 0.08148 Loss:0.07883 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.71s\n", - "1115808 Examples seen. Accuracy:0.9433 Error: 0.17055 Loss:0.13726 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.68s\n", - "1116448 Examples seen. Accuracy:0.9436 Error: 0.14327 Loss:0.22397 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.68s\n", - "1117088 Examples seen. Accuracy:0.9446 Error: 0.17658 Loss:0.13030 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.69s\n", - "1117728 Examples seen. Accuracy:0.9446 Error: 0.12643 Loss:0.13459 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.68s\n", - "1118368 Examples seen. Accuracy:0.9456 Error: 0.08116 Loss:0.08514 Threads: 8 Forward time: 5.06s Backward time: 3.31s Step time: 3.75s\n", - "1119008 Examples seen. Accuracy:0.9452 Error: 0.12995 Loss:0.09499 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.74s\n", - "1119648 Examples seen. 
Accuracy:0.9436 Error: 0.27118 Loss:0.34248 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.71s\n", - "1120288 Examples seen. Accuracy:0.9426 Error: 0.17803 Loss:0.23948 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.70s\n", - "1120928 Examples seen. Accuracy:0.9413 Error: 0.10791 Loss:0.15848 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.69s\n", - "1121568 Examples seen. Accuracy:0.9408 Error: 0.09225 Loss:0.06921 Threads: 8 Forward time: 4.95s Backward time: 3.28s Step time: 3.69s\n", - "1122208 Examples seen. Accuracy:0.9415 Error: 0.18588 Loss:0.15055 Threads: 8 Forward time: 4.91s Backward time: 3.31s Step time: 3.69s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1122848 Examples seen. Accuracy:0.9417 Error: 0.12132 Loss:0.12098 Threads: 8 Forward time: 5.03s Backward time: 3.33s Step time: 3.73s\n", - "1123488 Examples seen. Accuracy:0.9402 Error: 0.12329 Loss:0.12068 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.71s\n", - "1124128 Examples seen. Accuracy:0.9416 Error: 0.12067 Loss:0.10317 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.70s\n", - "1124768 Examples seen. Accuracy:0.9414 Error: 0.15002 Loss:0.11572 Threads: 8 Forward time: 5.09s Backward time: 3.30s Step time: 3.72s\n", - "1125408 Examples seen. Accuracy:0.9415 Error: 0.14510 Loss:0.16107 Threads: 8 Forward time: 5.05s Backward time: 3.29s Step time: 3.72s\n", - "1126048 Examples seen. Accuracy:0.9412 Error: 0.18075 Loss:0.17556 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.73s\n", - "1126688 Examples seen. Accuracy:0.9413 Error: 0.14880 Loss:0.10763 Threads: 8 Forward time: 4.99s Backward time: 3.30s Step time: 4.33s\n", - "1127328 Examples seen. Accuracy:0.9419 Error: 0.12706 Loss:0.13520 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.71s\n", - "1127968 Examples seen. 
Accuracy:0.9406 Error: 0.24491 Loss:0.26587 Threads: 8 Forward time: 4.93s Backward time: 3.27s Step time: 3.64s\n", - "1128608 Examples seen. Accuracy:0.9409 Error: 0.17438 Loss:0.18315 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.61s\n", - "1129248 Examples seen. Accuracy:0.9405 Error: 0.21757 Loss:0.21919 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.64s\n", - "1129888 Examples seen. Accuracy:0.9407 Error: 0.15371 Loss:0.12137 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.66s\n", - "1130528 Examples seen. Accuracy:0.9410 Error: 0.10420 Loss:0.06490 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.62s\n", - "1131168 Examples seen. Accuracy:0.9424 Error: 0.15544 Loss:0.10561 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.64s\n", - "1131808 Examples seen. Accuracy:0.9432 Error: 0.13716 Loss:0.09079 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.62s\n", - "1132448 Examples seen. Accuracy:0.9430 Error: 0.22261 Loss:0.18629 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.65s\n", - "1133088 Examples seen. Accuracy:0.9431 Error: 0.21223 Loss:0.21449 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.64s\n", - "1133728 Examples seen. Accuracy:0.9435 Error: 0.11453 Loss:0.09399 Threads: 8 Forward time: 5.05s Backward time: 3.33s Step time: 3.70s\n", - "1134368 Examples seen. Accuracy:0.9422 Error: 0.23880 Loss:0.28010 Threads: 8 Forward time: 4.91s Backward time: 3.31s Step time: 3.68s\n", - "1135008 Examples seen. Accuracy:0.9423 Error: 0.10716 Loss:0.08877 Threads: 8 Forward time: 4.93s Backward time: 3.27s Step time: 3.70s\n", - "1135648 Examples seen. Accuracy:0.9419 Error: 0.19055 Loss:0.17048 Threads: 8 Forward time: 4.92s Backward time: 3.27s Step time: 3.63s\n", - "1136288 Examples seen. 
Accuracy:0.9423 Error: 0.25915 Loss:0.30452 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.63s\n", - "1136928 Examples seen. Accuracy:0.9433 Error: 0.11288 Loss:0.09032 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.63s\n", - "1137568 Examples seen. Accuracy:0.9430 Error: 0.21124 Loss:0.18885 Threads: 8 Forward time: 4.93s Backward time: 3.27s Step time: 3.64s\n", - "1138208 Examples seen. Accuracy:0.9425 Error: 0.15065 Loss:0.16672 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.64s\n", - "1138848 Examples seen. Accuracy:0.9428 Error: 0.13827 Loss:0.13103 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.73s\n", - "1139488 Examples seen. Accuracy:0.9421 Error: 0.16019 Loss:0.17581 Threads: 8 Forward time: 4.91s Backward time: 3.26s Step time: 3.64s\n", - "1140128 Examples seen. Accuracy:0.9431 Error: 0.10862 Loss:0.09329 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.68s\n", - "1140768 Examples seen. Accuracy:0.9429 Error: 0.09697 Loss:0.05925 Threads: 8 Forward time: 5.02s Backward time: 3.29s Step time: 3.75s\n", - "1141408 Examples seen. Accuracy:0.9422 Error: 0.13361 Loss:0.14225 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.64s\n", - "1142048 Examples seen. Accuracy:0.9419 Error: 0.15342 Loss:0.12560 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.65s\n", - "1142688 Examples seen. Accuracy:0.9419 Error: 0.12563 Loss:0.09840 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.64s\n", - "1143328 Examples seen. Accuracy:0.9433 Error: 0.14517 Loss:0.11939 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.67s\n", - "1143968 Examples seen. Accuracy:0.9434 Error: 0.10243 Loss:0.07846 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.64s\n", - "1144608 Examples seen. 
Accuracy:0.9435 Error: 0.06028 Loss:0.03717 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.64s\n", - "1145248 Examples seen. Accuracy:0.9437 Error: 0.14712 Loss:0.11465 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.64s\n", - "1145888 Examples seen. Accuracy:0.9445 Error: 0.17405 Loss:0.22456 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.63s\n", - "1146528 Examples seen. Accuracy:0.9452 Error: 0.07290 Loss:0.07826 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.65s\n", - "1147168 Examples seen. Accuracy:0.9450 Error: 0.07994 Loss:0.07179 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.63s\n", - "Starting Validation.\n", - "Epochs: 23 Examples seen:1147792 Validation Accuracy: 0.9783 Validation Error: 0.0633 Validation Loss: 0.0684 Total time: 126.46min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 23. Working time: 2.11 hours.\n", - "1148432 Examples seen. Accuracy:0.9444 Error: 0.10659 Loss:0.08178 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.67s\n", - "1149072 Examples seen. Accuracy:0.9446 Error: 0.08228 Loss:0.05780 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.75s\n", - "1149712 Examples seen. Accuracy:0.9461 Error: 0.06611 Loss:0.04354 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.69s\n", - "1150352 Examples seen. Accuracy:0.9443 Error: 0.14121 Loss:0.09172 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.71s\n", - "1150992 Examples seen. Accuracy:0.9447 Error: 0.11143 Loss:0.10094 Threads: 8 Forward time: 5.01s Backward time: 3.29s Step time: 3.75s\n", - "1151632 Examples seen. Accuracy:0.9441 Error: 0.20640 Loss:0.18981 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.68s\n", - "1152272 Examples seen. Accuracy:0.9451 Error: 0.09881 Loss:0.06911 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 4.14s\n", - "1152912 Examples seen. 
Accuracy:0.9447 Error: 0.13251 Loss:0.15502 Threads: 8 Forward time: 5.09s Backward time: 3.34s Step time: 3.71s\n", - "1153552 Examples seen. Accuracy:0.9452 Error: 0.16591 Loss:0.12209 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.71s\n", - "1154192 Examples seen. Accuracy:0.9454 Error: 0.07522 Loss:0.05972 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 3.67s\n", - "1154832 Examples seen. Accuracy:0.9453 Error: 0.11190 Loss:0.10006 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.67s\n", - "1155472 Examples seen. Accuracy:0.9465 Error: 0.11870 Loss:0.14309 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.65s\n", - "1156112 Examples seen. Accuracy:0.9463 Error: 0.14962 Loss:0.15763 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.64s\n", - "1156752 Examples seen. Accuracy:0.9457 Error: 0.14971 Loss:0.17850 Threads: 8 Forward time: 5.06s Backward time: 3.29s Step time: 3.68s\n", - "1157392 Examples seen. Accuracy:0.9460 Error: 0.15542 Loss:0.18851 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.67s\n", - "1158032 Examples seen. Accuracy:0.9460 Error: 0.17875 Loss:0.16656 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.62s\n", - "1158672 Examples seen. Accuracy:0.9469 Error: 0.16663 Loss:0.16340 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.63s\n", - "1159312 Examples seen. Accuracy:0.9479 Error: 0.23977 Loss:0.25127 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.68s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1159952 Examples seen. Accuracy:0.9487 Error: 0.05452 Loss:0.04071 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.66s\n", - "1160592 Examples seen. Accuracy:0.9480 Error: 0.22770 Loss:0.28022 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.67s\n", - "1161232 Examples seen. 
Accuracy:0.9476 Error: 0.20605 Loss:0.20552 Threads: 8 Forward time: 4.99s Backward time: 3.31s Step time: 3.66s\n", - "1161872 Examples seen. Accuracy:0.9480 Error: 0.10632 Loss:0.07728 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.80s\n", - "1162512 Examples seen. Accuracy:0.9470 Error: 0.17529 Loss:0.17325 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.68s\n", - "1163152 Examples seen. Accuracy:0.9477 Error: 0.07620 Loss:0.04764 Threads: 8 Forward time: 4.96s Backward time: 3.32s Step time: 3.70s\n", - "1163792 Examples seen. Accuracy:0.9470 Error: 0.19919 Loss:0.23673 Threads: 8 Forward time: 5.14s Backward time: 3.35s Step time: 3.79s\n", - "1164432 Examples seen. Accuracy:0.9476 Error: 0.10805 Loss:0.10318 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.75s\n", - "1165072 Examples seen. Accuracy:0.9473 Error: 0.10546 Loss:0.11530 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.71s\n", - "1165712 Examples seen. Accuracy:0.9480 Error: 0.09670 Loss:0.07497 Threads: 8 Forward time: 5.03s Backward time: 3.28s Step time: 3.70s\n", - "1166352 Examples seen. Accuracy:0.9486 Error: 0.21044 Loss:0.22811 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.71s\n", - "1166992 Examples seen. Accuracy:0.9496 Error: 0.15267 Loss:0.19842 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 4.68s\n", - "1167632 Examples seen. Accuracy:0.9499 Error: 0.11576 Loss:0.09334 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 4.00s\n", - "1168272 Examples seen. Accuracy:0.9487 Error: 0.25070 Loss:0.26194 Threads: 8 Forward time: 5.02s Backward time: 3.29s Step time: 4.73s\n", - "1168912 Examples seen. Accuracy:0.9490 Error: 0.15279 Loss:0.13863 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.79s\n", - "1169552 Examples seen. 
Accuracy:0.9496 Error: 0.13206 Loss:0.15998 Threads: 8 Forward time: 5.04s Backward time: 3.30s Step time: 3.91s\n", - "1170192 Examples seen. Accuracy:0.9484 Error: 0.20045 Loss:0.28935 Threads: 8 Forward time: 5.04s Backward time: 3.28s Step time: 3.79s\n", - "1170832 Examples seen. Accuracy:0.9476 Error: 0.18495 Loss:0.19545 Threads: 8 Forward time: 4.97s Backward time: 3.32s Step time: 3.80s\n", - "1171472 Examples seen. Accuracy:0.9464 Error: 0.09976 Loss:0.08665 Threads: 8 Forward time: 5.01s Backward time: 3.31s Step time: 3.79s\n", - "1172112 Examples seen. Accuracy:0.9461 Error: 0.14884 Loss:0.14387 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.80s\n", - "1172752 Examples seen. Accuracy:0.9453 Error: 0.14433 Loss:0.12570 Threads: 8 Forward time: 5.07s Backward time: 3.30s Step time: 3.79s\n", - "1173392 Examples seen. Accuracy:0.9448 Error: 0.20144 Loss:0.20165 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.74s\n", - "1174032 Examples seen. Accuracy:0.9453 Error: 0.14749 Loss:0.13590 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.68s\n", - "1174672 Examples seen. Accuracy:0.9438 Error: 0.10591 Loss:0.07765 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.78s\n", - "1175312 Examples seen. Accuracy:0.9432 Error: 0.15394 Loss:0.14875 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.71s\n", - "1175952 Examples seen. Accuracy:0.9441 Error: 0.21772 Loss:0.19824 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.70s\n", - "1176592 Examples seen. Accuracy:0.9443 Error: 0.13137 Loss:0.09814 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.68s\n", - "1177232 Examples seen. Accuracy:0.9441 Error: 0.14829 Loss:0.16312 Threads: 8 Forward time: 5.06s Backward time: 3.29s Step time: 3.72s\n", - "1177872 Examples seen. 
Accuracy:0.9444 Error: 0.12328 Loss:0.11496 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.70s\n", - "1178512 Examples seen. Accuracy:0.9449 Error: 0.09082 Loss:0.05992 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.70s\n", - "1179152 Examples seen. Accuracy:0.9454 Error: 0.16169 Loss:0.15060 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.71s\n", - "1179792 Examples seen. Accuracy:0.9446 Error: 0.16590 Loss:0.20085 Threads: 8 Forward time: 5.03s Backward time: 3.32s Step time: 3.71s\n", - "1180432 Examples seen. Accuracy:0.9450 Error: 0.11239 Loss:0.07653 Threads: 8 Forward time: 4.99s Backward time: 3.35s Step time: 3.73s\n", - "1181072 Examples seen. Accuracy:0.9459 Error: 0.18015 Loss:0.14598 Threads: 8 Forward time: 4.98s Backward time: 3.32s Step time: 4.31s\n", - "1181712 Examples seen. Accuracy:0.9442 Error: 0.14710 Loss:0.13437 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.71s\n", - "1182352 Examples seen. Accuracy:0.9448 Error: 0.09371 Loss:0.06447 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.68s\n", - "1182992 Examples seen. Accuracy:0.9437 Error: 0.10926 Loss:0.14950 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.67s\n", - "1183632 Examples seen. Accuracy:0.9440 Error: 0.19048 Loss:0.14861 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.66s\n", - "1184272 Examples seen. Accuracy:0.9433 Error: 0.11949 Loss:0.13171 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.68s\n", - "1184912 Examples seen. Accuracy:0.9441 Error: 0.21216 Loss:0.18976 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.68s\n", - "1185552 Examples seen. Accuracy:0.9440 Error: 0.15406 Loss:0.12353 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.66s\n", - "1186192 Examples seen. 
Accuracy:0.9441 Error: 0.10416 Loss:0.09012 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.66s\n", - "1186832 Examples seen. Accuracy:0.9457 Error: 0.04256 Loss:0.02493 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.67s\n", - "1187472 Examples seen. Accuracy:0.9476 Error: 0.06884 Loss:0.04220 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.68s\n", - "1188112 Examples seen. Accuracy:0.9477 Error: 0.09049 Loss:0.19988 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.68s\n", - "1188752 Examples seen. Accuracy:0.9478 Error: 0.16043 Loss:0.21767 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.68s\n", - "1189392 Examples seen. Accuracy:0.9473 Error: 0.07894 Loss:0.06675 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.67s\n", - "1190032 Examples seen. Accuracy:0.9458 Error: 0.22327 Loss:0.21855 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.69s\n", - "1190672 Examples seen. Accuracy:0.9463 Error: 0.19136 Loss:0.43410 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.68s\n", - "1191312 Examples seen. Accuracy:0.9458 Error: 0.16086 Loss:0.14695 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.70s\n", - "1191952 Examples seen. Accuracy:0.9452 Error: 0.16623 Loss:0.13273 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.67s\n", - "1192592 Examples seen. Accuracy:0.9443 Error: 0.15744 Loss:0.19732 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.70s\n", - "1193232 Examples seen. Accuracy:0.9451 Error: 0.08387 Loss:0.10092 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.69s\n", - "1193872 Examples seen. Accuracy:0.9421 Error: 0.21001 Loss:0.25713 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.68s\n", - "1194512 Examples seen. 
Accuracy:0.9418 Error: 0.13565 Loss:0.19294 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.67s\n", - "1195152 Examples seen. Accuracy:0.9420 Error: 0.16291 Loss:0.19515 Threads: 8 Forward time: 4.99s Backward time: 3.30s Step time: 3.68s\n", - "1195792 Examples seen. Accuracy:0.9427 Error: 0.11184 Loss:0.10038 Threads: 8 Forward time: 4.93s Backward time: 3.30s Step time: 3.68s\n", - "1196432 Examples seen. Accuracy:0.9437 Error: 0.10837 Loss:0.11333 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.68s\n", - "1197072 Examples seen. Accuracy:0.9454 Error: 0.14414 Loss:0.13828 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.68s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 24 Examples seen:1197696 Validation Accuracy: 0.9804 Validation Error: 0.0610 Validation Loss: 0.0669 Total time: 131.81min\n", - "Layer 0 Max Output: 1.266 Min Output: -2.000 TNNetInput 128,128,3 Times: 0.00s 0.00s\n", - "Layer 1 Neurons: 64 Max Weight: 0.409 Min Weight: -0.325 Max Output: 5.613 Min Output: -5.392 TNNetConvolutionLinear 66,66,64 Times: 8.60s 0.40s Parent:0\n", - "Layer 2 Max Output: 5.613 Min Output: -3.228 TNNetMaxPool 33,33,64 Times: 3.59s 0.06s Parent:1\n", - "Layer 3 Neurons: 1 Max Weight: 0.605 Min Weight: 0.262 Max Output: 8.851 Min Output: -5.771 TNNetMovingStdNormalization 33,33,64 Times: 0.29s 0.00s Parent:2\n", - "Layer 4 Neurons: 64 Max Weight: 0.332 Min Weight: -0.205 Max Output: 10.548 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.90s 0.51s Parent:3\n", - "Layer 5 Neurons: 64 Max Weight: 0.346 Min Weight: -0.338 Max Output: 10.886 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.89s 0.18s Parent:4\n", - "Layer 6 Max Output: 10.886 Min Output: 0.000 TNNetMaxPool 17,17,64 Times: 0.50s 0.02s Parent:5\n", - "Layer 7 Neurons: 64 Max Weight: 0.377 Min Weight: -0.248 Max 
Output: 7.765 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.44s 0.09s Parent:6\n", - "Layer 8 Neurons: 64 Max Weight: 0.255 Min Weight: -0.235 Max Output: 5.471 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.46s 0.09s Parent:7\n", - "Layer 9 Neurons: 64 Max Weight: 0.235 Min Weight: -0.208 Max Output: 8.848 Min Output: 0.000 TNNetConvolutionReLU 9,9,64 Times: 0.48s 0.02s Parent:8\n", - "Layer 10 Max Output: 8.848 Min Output: 0.000 TNNetDropout 9,9,64 Times: 0.01s 0.00s Parent:9\n", - "Layer 11 Max Output: 8.848 Min Output: 0.000 TNNetMaxPool 5,5,64 Times: 0.05s 0.00s Parent:10\n", - "Layer 12 Neurons: 39 Max Weight: 0.385 Min Weight: -0.375 Max Output: 25.601 Min Output: -10.537 TNNetFullConnectLinear 39,1,1 Times: 0.04s 0.00s Parent:11\n", - "Layer 13 Max Output: 1.000 Min Output: 0.000 TNNetSoftMax 39,1,1 Times: 0.00s 0.00s Parent:12\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 24. Working time: 2.2 hours.\n", - "1198336 Examples seen. Accuracy:0.9468 Error: 0.26289 Loss:0.27406 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.81s\n", - "1198976 Examples seen. Accuracy:0.9477 Error: 0.15083 Loss:0.18740 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 4.45s\n", - "1199616 Examples seen. Accuracy:0.9490 Error: 0.14372 Loss:0.12510 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.63s\n", - "1200256 Examples seen. Accuracy:0.9485 Error: 0.08716 Loss:0.07109 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.65s\n", - "1200896 Examples seen. Accuracy:0.9488 Error: 0.10198 Loss:0.11925 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.60s\n", - "1201536 Examples seen. Accuracy:0.9476 Error: 0.06668 Loss:0.04838 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.61s\n", - "1202176 Examples seen. Accuracy:0.9473 Error: 0.08376 Loss:0.05120 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.65s\n", - "1202816 Examples seen. 
Accuracy:0.9459 Error: 0.26784 Loss:0.26125 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.63s\n", - "1203456 Examples seen. Accuracy:0.9460 Error: 0.09583 Loss:0.08170 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.64s\n", - "1204096 Examples seen. Accuracy:0.9433 Error: 0.15850 Loss:0.11102 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.64s\n", - "1204736 Examples seen. Accuracy:0.9440 Error: 0.12450 Loss:0.10567 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.65s\n", - "1205376 Examples seen. Accuracy:0.9444 Error: 0.09275 Loss:0.07461 Threads: 8 Forward time: 5.09s Backward time: 3.28s Step time: 3.65s\n", - "1206016 Examples seen. Accuracy:0.9435 Error: 0.09913 Loss:0.09007 Threads: 8 Forward time: 5.01s Backward time: 3.27s Step time: 4.32s\n", - "1206656 Examples seen. Accuracy:0.9436 Error: 0.16410 Loss:0.16115 Threads: 8 Forward time: 5.11s Backward time: 3.29s Step time: 3.75s\n", - "1207296 Examples seen. Accuracy:0.9432 Error: 0.21248 Loss:0.19766 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.64s\n", - "1207936 Examples seen. Accuracy:0.9436 Error: 0.14624 Loss:0.19549 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.61s\n", - "1208576 Examples seen. Accuracy:0.9435 Error: 0.22112 Loss:0.26161 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.60s\n", - "1209216 Examples seen. Accuracy:0.9435 Error: 0.22516 Loss:0.21452 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.60s\n", - "1209856 Examples seen. Accuracy:0.9450 Error: 0.13971 Loss:0.15646 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.61s\n", - "1210496 Examples seen. Accuracy:0.9453 Error: 0.10825 Loss:0.18735 Threads: 8 Forward time: 4.91s Backward time: 3.25s Step time: 3.60s\n", - "1211136 Examples seen. 
Accuracy:0.9451 Error: 0.18021 Loss:0.15731 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.63s\n", - "1211776 Examples seen. Accuracy:0.9446 Error: 0.15781 Loss:0.14680 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.62s\n", - "1212416 Examples seen. Accuracy:0.9463 Error: 0.08204 Loss:0.05657 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.62s\n", - "1213056 Examples seen. Accuracy:0.9462 Error: 0.08583 Loss:0.07119 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.63s\n", - "1213696 Examples seen. Accuracy:0.9461 Error: 0.08549 Loss:0.06467 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.63s\n", - "1214336 Examples seen. Accuracy:0.9450 Error: 0.14863 Loss:0.14249 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.62s\n", - "1214976 Examples seen. Accuracy:0.9445 Error: 0.12561 Loss:0.15898 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.63s\n", - "1215616 Examples seen. Accuracy:0.9435 Error: 0.15151 Loss:0.15308 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.66s\n", - "1216256 Examples seen. Accuracy:0.9450 Error: 0.08567 Loss:0.09853 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.68s\n", - "1216896 Examples seen. Accuracy:0.9448 Error: 0.08823 Loss:0.08829 Threads: 8 Forward time: 4.88s Backward time: 3.29s Step time: 3.61s\n", - "1217536 Examples seen. Accuracy:0.9437 Error: 0.11377 Loss:0.09614 Threads: 8 Forward time: 4.91s Backward time: 3.27s Step time: 3.62s\n", - "1218176 Examples seen. Accuracy:0.9448 Error: 0.15327 Loss:0.15363 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.61s\n", - "1218816 Examples seen. Accuracy:0.9433 Error: 0.15426 Loss:0.12756 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.61s\n", - "1219456 Examples seen. 
Accuracy:0.9436 Error: 0.10868 Loss:0.08253 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.62s\n", - "1220096 Examples seen. Accuracy:0.9447 Error: 0.17057 Loss:0.19539 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.62s\n", - "1220736 Examples seen. Accuracy:0.9441 Error: 0.11697 Loss:0.10937 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.63s\n", - "1221376 Examples seen. Accuracy:0.9428 Error: 0.16507 Loss:0.13780 Threads: 8 Forward time: 4.92s Backward time: 3.22s Step time: 3.61s\n", - "1222016 Examples seen. Accuracy:0.9426 Error: 0.15613 Loss:0.12696 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.62s\n", - "1222656 Examples seen. Accuracy:0.9438 Error: 0.15462 Loss:0.12980 Threads: 8 Forward time: 4.90s Backward time: 3.21s Step time: 3.60s\n", - "1223296 Examples seen. Accuracy:0.9418 Error: 0.27787 Loss:0.32166 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.69s\n", - "1223936 Examples seen. Accuracy:0.9426 Error: 0.21598 Loss:0.26784 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.64s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1224576 Examples seen. Accuracy:0.9438 Error: 0.10671 Loss:0.08176 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.63s\n", - "1225216 Examples seen. Accuracy:0.9444 Error: 0.24735 Loss:0.45198 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.62s\n", - "1225856 Examples seen. Accuracy:0.9439 Error: 0.13822 Loss:0.14409 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.62s\n", - "1226496 Examples seen. Accuracy:0.9445 Error: 0.16606 Loss:0.16985 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.62s\n", - "1227136 Examples seen. Accuracy:0.9449 Error: 0.15579 Loss:0.14052 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.66s\n", - "1227776 Examples seen. 
Accuracy:0.9455 Error: 0.15393 Loss:0.16454 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.68s\n", - "1228416 Examples seen. Accuracy:0.9464 Error: 0.13151 Loss:0.09741 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.66s\n", - "1229056 Examples seen. Accuracy:0.9470 Error: 0.14473 Loss:0.14964 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.65s\n", - "1229696 Examples seen. Accuracy:0.9470 Error: 0.10573 Loss:0.09065 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.66s\n", - "1230336 Examples seen. Accuracy:0.9473 Error: 0.17484 Loss:0.16137 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.66s\n", - "1230976 Examples seen. Accuracy:0.9456 Error: 0.18295 Loss:0.16258 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.65s\n", - "1231616 Examples seen. Accuracy:0.9466 Error: 0.11990 Loss:0.09036 Threads: 8 Forward time: 4.90s Backward time: 3.24s Step time: 3.65s\n", - "1232256 Examples seen. Accuracy:0.9471 Error: 0.19462 Loss:0.16939 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.67s\n", - "1232896 Examples seen. Accuracy:0.9461 Error: 0.13283 Loss:0.12480 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.66s\n", - "1233536 Examples seen. Accuracy:0.9451 Error: 0.18658 Loss:0.20118 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.67s\n", - "1234176 Examples seen. Accuracy:0.9460 Error: 0.12775 Loss:0.10271 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.68s\n", - "1234816 Examples seen. Accuracy:0.9472 Error: 0.15311 Loss:0.13163 Threads: 8 Forward time: 5.04s Backward time: 3.27s Step time: 3.67s\n", - "1235456 Examples seen. Accuracy:0.9478 Error: 0.12933 Loss:0.11709 Threads: 8 Forward time: 5.04s Backward time: 3.28s Step time: 3.69s\n", - "1236096 Examples seen. 
Accuracy:0.9479 Error: 0.27455 Loss:0.31081 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 4.16s\n", - "1236736 Examples seen. Accuracy:0.9474 Error: 0.13430 Loss:0.11347 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.64s\n", - "1237376 Examples seen. Accuracy:0.9478 Error: 0.14763 Loss:0.11102 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.71s\n", - "1238016 Examples seen. Accuracy:0.9500 Error: 0.16345 Loss:0.16945 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "1238656 Examples seen. Accuracy:0.9497 Error: 0.11825 Loss:0.09453 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.64s\n", - "1239296 Examples seen. Accuracy:0.9492 Error: 0.14777 Loss:0.10634 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.65s\n", - "1239936 Examples seen. Accuracy:0.9488 Error: 0.06227 Loss:0.06355 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.67s\n", - "1240576 Examples seen. Accuracy:0.9479 Error: 0.06274 Loss:0.03663 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.62s\n", - "1241216 Examples seen. Accuracy:0.9473 Error: 0.14335 Loss:0.15098 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.62s\n", - "1241856 Examples seen. Accuracy:0.9465 Error: 0.18920 Loss:0.23447 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.64s\n", - "1242496 Examples seen. Accuracy:0.9465 Error: 0.26508 Loss:0.32459 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.64s\n", - "1243136 Examples seen. Accuracy:0.9469 Error: 0.17908 Loss:0.16225 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.64s\n", - "1243776 Examples seen. Accuracy:0.9479 Error: 0.12929 Loss:0.10541 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.70s\n", - "1244416 Examples seen. 
Accuracy:0.9484 Error: 0.13879 Loss:0.13635 Threads: 8 Forward time: 5.07s Backward time: 3.32s Step time: 3.67s\n", - "1245056 Examples seen. Accuracy:0.9479 Error: 0.22548 Loss:0.26002 Threads: 8 Forward time: 4.95s Backward time: 3.29s Step time: 3.65s\n", - "1245696 Examples seen. Accuracy:0.9481 Error: 0.11780 Loss:0.08024 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.62s\n", - "1246336 Examples seen. Accuracy:0.9487 Error: 0.13710 Loss:0.15133 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.63s\n", - "1246976 Examples seen. Accuracy:0.9501 Error: 0.15814 Loss:0.16433 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.63s\n", - "Starting Validation.\n", - "Epochs: 25 Examples seen:1247600 Validation Accuracy: 0.9801 Validation Error: 0.0592 Validation Loss: 0.0645 Total time: 137.05min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 25. Working time: 2.28 hours.\n", - "1248240 Examples seen. Accuracy:0.9493 Error: 0.19384 Loss:0.27351 Threads: 8 Forward time: 4.97s Backward time: 3.27s Step time: 3.66s\n", - "1248880 Examples seen. Accuracy:0.9504 Error: 0.16422 Loss:0.12801 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.64s\n", - "1249520 Examples seen. Accuracy:0.9513 Error: 0.14088 Loss:0.22432 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.63s\n", - "1250160 Examples seen. Accuracy:0.9514 Error: 0.16068 Loss:0.20091 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.65s\n", - "1250800 Examples seen. Accuracy:0.9509 Error: 0.07353 Loss:0.06080 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.65s\n", - "1251440 Examples seen. Accuracy:0.9494 Error: 0.11821 Loss:0.13711 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.65s\n", - "1252080 Examples seen. Accuracy:0.9483 Error: 0.13161 Loss:0.10466 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.65s\n", - "1252720 Examples seen. 
Accuracy:0.9478 Error: 0.19342 Loss:0.29328 Threads: 8 Forward time: 5.04s Backward time: 3.27s Step time: 3.69s\n", - "1253360 Examples seen. Accuracy:0.9468 Error: 0.26612 Loss:0.23271 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.68s\n", - "1254000 Examples seen. Accuracy:0.9451 Error: 0.25302 Loss:0.25177 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.65s\n", - "1254640 Examples seen. Accuracy:0.9447 Error: 0.14129 Loss:0.09581 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.65s\n", - "1255280 Examples seen. Accuracy:0.9439 Error: 0.12214 Loss:0.09685 Threads: 8 Forward time: 4.95s Backward time: 3.29s Step time: 3.64s\n", - "1255920 Examples seen. Accuracy:0.9436 Error: 0.15051 Loss:0.16222 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.61s\n", - "1256560 Examples seen. Accuracy:0.9444 Error: 0.18626 Loss:0.22004 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.65s\n", - "1257200 Examples seen. Accuracy:0.9443 Error: 0.14887 Loss:0.24160 Threads: 8 Forward time: 5.06s Backward time: 3.29s Step time: 3.63s\n", - "1257840 Examples seen. Accuracy:0.9454 Error: 0.10066 Loss:0.06353 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.65s\n", - "1258480 Examples seen. Accuracy:0.9453 Error: 0.13658 Loss:0.10336 Threads: 8 Forward time: 5.04s Backward time: 3.29s Step time: 3.65s\n", - "1259120 Examples seen. Accuracy:0.9463 Error: 0.18036 Loss:0.16815 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.65s\n", - "1259760 Examples seen. Accuracy:0.9455 Error: 0.16396 Loss:0.22196 Threads: 8 Forward time: 5.04s Backward time: 3.30s Step time: 3.65s\n", - "1260400 Examples seen. Accuracy:0.9452 Error: 0.19549 Loss:0.23918 Threads: 8 Forward time: 5.05s Backward time: 3.24s Step time: 3.65s\n", - "1261040 Examples seen. 
Accuracy:0.9441 Error: 0.12265 Loss:0.08074 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 4.30s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1261680 Examples seen. Accuracy:0.9429 Error: 0.18740 Loss:0.21537 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.73s\n", - "1262320 Examples seen. Accuracy:0.9433 Error: 0.15901 Loss:0.17846 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.67s\n", - "1262960 Examples seen. Accuracy:0.9442 Error: 0.07459 Loss:0.05525 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.68s\n", - "1263600 Examples seen. Accuracy:0.9456 Error: 0.13322 Loss:0.10159 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.75s\n", - "1264240 Examples seen. Accuracy:0.9446 Error: 0.20158 Loss:0.30780 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.68s\n", - "1264880 Examples seen. Accuracy:0.9439 Error: 0.10327 Loss:0.08684 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.69s\n", - "1265520 Examples seen. Accuracy:0.9444 Error: 0.12762 Loss:0.10777 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.69s\n", - "1266160 Examples seen. Accuracy:0.9442 Error: 0.24294 Loss:0.27773 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.70s\n", - "1266800 Examples seen. Accuracy:0.9443 Error: 0.11615 Loss:0.09435 Threads: 8 Forward time: 5.17s Backward time: 3.37s Step time: 3.74s\n", - "1267440 Examples seen. Accuracy:0.9447 Error: 0.05781 Loss:0.03715 Threads: 8 Forward time: 4.92s Backward time: 3.27s Step time: 3.70s\n", - "1268080 Examples seen. Accuracy:0.9441 Error: 0.10428 Loss:0.08833 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.68s\n", - "1268720 Examples seen. Accuracy:0.9439 Error: 0.10423 Loss:0.06373 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.67s\n", - "1269360 Examples seen. 
Accuracy:0.9434 Error: 0.15460 Loss:0.14539 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.69s\n", - "1270000 Examples seen. Accuracy:0.9427 Error: 0.18121 Loss:0.15550 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.71s\n", - "1270640 Examples seen. Accuracy:0.9435 Error: 0.06787 Loss:0.04183 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.74s\n", - "1271280 Examples seen. Accuracy:0.9446 Error: 0.10047 Loss:0.07592 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.67s\n", - "1271920 Examples seen. Accuracy:0.9452 Error: 0.20400 Loss:0.19751 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.72s\n", - "1272560 Examples seen. Accuracy:0.9471 Error: 0.15276 Loss:0.15829 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.70s\n", - "1273200 Examples seen. Accuracy:0.9468 Error: 0.15180 Loss:0.14723 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.71s\n", - "1273840 Examples seen. Accuracy:0.9453 Error: 0.22588 Loss:0.46818 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.69s\n", - "1274480 Examples seen. Accuracy:0.9452 Error: 0.10178 Loss:0.07394 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.70s\n", - "1275120 Examples seen. Accuracy:0.9444 Error: 0.18727 Loss:0.14680 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.68s\n", - "1275760 Examples seen. Accuracy:0.9449 Error: 0.16739 Loss:0.15439 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.69s\n", - "1276400 Examples seen. Accuracy:0.9441 Error: 0.18841 Loss:0.22804 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.72s\n", - "1277040 Examples seen. Accuracy:0.9448 Error: 0.09128 Loss:0.06777 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.72s\n", - "1277680 Examples seen. 
Accuracy:0.9452 Error: 0.10307 Loss:0.06522 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.69s\n", - "1278320 Examples seen. Accuracy:0.9450 Error: 0.15240 Loss:0.10682 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.69s\n", - "1278960 Examples seen. Accuracy:0.9451 Error: 0.11009 Loss:0.08482 Threads: 8 Forward time: 5.01s Backward time: 3.31s Step time: 3.71s\n", - "1279600 Examples seen. Accuracy:0.9449 Error: 0.09153 Loss:0.08361 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.70s\n", - "1280240 Examples seen. Accuracy:0.9456 Error: 0.13692 Loss:0.13277 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.68s\n", - "1280880 Examples seen. Accuracy:0.9467 Error: 0.09231 Loss:0.06253 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.71s\n", - "1281520 Examples seen. Accuracy:0.9477 Error: 0.10891 Loss:0.07700 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.70s\n", - "1282160 Examples seen. Accuracy:0.9466 Error: 0.10718 Loss:0.18481 Threads: 8 Forward time: 5.05s Backward time: 3.30s Step time: 3.71s\n", - "1282800 Examples seen. Accuracy:0.9466 Error: 0.16089 Loss:0.23117 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.73s\n", - "1283440 Examples seen. Accuracy:0.9464 Error: 0.08310 Loss:0.06702 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.71s\n", - "1284080 Examples seen. Accuracy:0.9455 Error: 0.08818 Loss:0.05569 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.72s\n", - "1284720 Examples seen. Accuracy:0.9457 Error: 0.04130 Loss:0.02248 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.71s\n", - "1285360 Examples seen. Accuracy:0.9461 Error: 0.06465 Loss:0.04299 Threads: 8 Forward time: 5.07s Backward time: 3.32s Step time: 3.77s\n", - "1286000 Examples seen. 
Accuracy:0.9472 Error: 0.10965 Loss:0.09964 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.77s\n", - "1286640 Examples seen. Accuracy:0.9489 Error: 0.19077 Loss:0.15463 Threads: 8 Forward time: 5.13s Backward time: 3.33s Step time: 3.75s\n", - "1287280 Examples seen. Accuracy:0.9477 Error: 0.26201 Loss:0.36590 Threads: 8 Forward time: 5.04s Backward time: 3.29s Step time: 3.74s\n", - "1287920 Examples seen. Accuracy:0.9470 Error: 0.20909 Loss:0.19749 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.79s\n", - "1288560 Examples seen. Accuracy:0.9466 Error: 0.16427 Loss:0.17279 Threads: 8 Forward time: 5.05s Backward time: 3.24s Step time: 3.81s\n", - "1289200 Examples seen. Accuracy:0.9469 Error: 0.13868 Loss:0.14798 Threads: 8 Forward time: 5.06s Backward time: 3.25s Step time: 3.72s\n", - "1289840 Examples seen. Accuracy:0.9455 Error: 0.16524 Loss:0.20594 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.72s\n", - "1290480 Examples seen. Accuracy:0.9474 Error: 0.10920 Loss:0.09763 Threads: 8 Forward time: 5.08s Backward time: 3.27s Step time: 3.76s\n", - "1291120 Examples seen. Accuracy:0.9475 Error: 0.17874 Loss:0.12385 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 4.22s\n", - "1291760 Examples seen. Accuracy:0.9467 Error: 0.17514 Loss:0.18256 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.65s\n", - "1292400 Examples seen. Accuracy:0.9453 Error: 0.23528 Loss:0.23100 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.64s\n", - "1293040 Examples seen. Accuracy:0.9456 Error: 0.25035 Loss:0.23062 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.70s\n", - "1293680 Examples seen. Accuracy:0.9451 Error: 0.17332 Loss:0.24107 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.73s\n", - "1294320 Examples seen. 
Accuracy:0.9463 Error: 0.10139 Loss:0.11800 Threads: 8 Forward time: 4.93s Backward time: 3.31s Step time: 3.67s\n", - "1294960 Examples seen. Accuracy:0.9474 Error: 0.14364 Loss:0.10782 Threads: 8 Forward time: 5.01s Backward time: 3.33s Step time: 3.75s\n", - "1295600 Examples seen. Accuracy:0.9475 Error: 0.10474 Loss:0.07952 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.66s\n", - "1296240 Examples seen. Accuracy:0.9463 Error: 0.20403 Loss:0.24117 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.73s\n", - "1296880 Examples seen. Accuracy:0.9453 Error: 0.15086 Loss:0.14591 Threads: 8 Forward time: 4.89s Backward time: 3.27s Step time: 3.63s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 26 Examples seen:1297504 Validation Accuracy: 0.9815 Validation Error: 0.0580 Validation Loss: 0.0641 Total time: 142.35min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 26. Working time: 2.37 hours.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1298144 Examples seen. Accuracy:0.9446 Error: 0.21471 Loss:0.22340 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.66s\n", - "1298784 Examples seen. Accuracy:0.9432 Error: 0.32632 Loss:0.38966 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.64s\n", - "1299424 Examples seen. Accuracy:0.9423 Error: 0.17222 Loss:0.11502 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.64s\n", - "1300064 Examples seen. Accuracy:0.9436 Error: 0.06403 Loss:0.03990 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.64s\n", - "1300704 Examples seen. Accuracy:0.9421 Error: 0.25386 Loss:0.38098 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.66s\n", - "1301344 Examples seen. Accuracy:0.9437 Error: 0.19014 Loss:0.19647 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.69s\n", - "1301984 Examples seen. 
Accuracy:0.9435 Error: 0.12837 Loss:0.09401 Threads: 8 Forward time: 4.90s Backward time: 3.24s Step time: 3.64s\n", - "1302624 Examples seen. Accuracy:0.9434 Error: 0.12868 Loss:0.15914 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.63s\n", - "1303264 Examples seen. Accuracy:0.9434 Error: 0.18031 Loss:0.17309 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.63s\n", - "1303904 Examples seen. Accuracy:0.9430 Error: 0.16687 Loss:0.21385 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.63s\n", - "1304544 Examples seen. Accuracy:0.9435 Error: 0.13974 Loss:0.13453 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.64s\n", - "1305184 Examples seen. Accuracy:0.9444 Error: 0.13430 Loss:0.10248 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.65s\n", - "1305824 Examples seen. Accuracy:0.9462 Error: 0.06340 Loss:0.03965 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.65s\n", - "1306464 Examples seen. Accuracy:0.9460 Error: 0.07035 Loss:0.06770 Threads: 8 Forward time: 4.95s Backward time: 3.28s Step time: 3.64s\n", - "1307104 Examples seen. Accuracy:0.9454 Error: 0.12145 Loss:0.11008 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.66s\n", - "1307744 Examples seen. Accuracy:0.9443 Error: 0.16507 Loss:0.16787 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.69s\n", - "1308384 Examples seen. Accuracy:0.9448 Error: 0.17914 Loss:0.18987 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.67s\n", - "1309024 Examples seen. Accuracy:0.9444 Error: 0.15677 Loss:0.18336 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.65s\n", - "1309664 Examples seen. Accuracy:0.9425 Error: 0.21995 Loss:0.23747 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.65s\n", - "1310304 Examples seen. 
Accuracy:0.9430 Error: 0.16408 Loss:0.15231 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.66s\n", - "1310944 Examples seen. Accuracy:0.9431 Error: 0.12083 Loss:0.12240 Threads: 8 Forward time: 4.93s Backward time: 3.21s Step time: 3.69s\n", - "1311584 Examples seen. Accuracy:0.9441 Error: 0.20318 Loss:0.22036 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.64s\n", - "1312224 Examples seen. Accuracy:0.9442 Error: 0.20686 Loss:0.26510 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.67s\n", - "1312864 Examples seen. Accuracy:0.9438 Error: 0.06887 Loss:0.04485 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.65s\n", - "1313504 Examples seen. Accuracy:0.9436 Error: 0.06161 Loss:0.08989 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.66s\n", - "1314144 Examples seen. Accuracy:0.9430 Error: 0.10659 Loss:0.08089 Threads: 8 Forward time: 5.01s Backward time: 3.30s Step time: 3.67s\n", - "1314784 Examples seen. Accuracy:0.9438 Error: 0.12844 Loss:0.16150 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.68s\n", - "1315424 Examples seen. Accuracy:0.9429 Error: 0.09915 Loss:0.24830 Threads: 8 Forward time: 5.01s Backward time: 3.27s Step time: 3.66s\n", - "1316064 Examples seen. Accuracy:0.9427 Error: 0.17798 Loss:0.18002 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 4.26s\n", - "1316704 Examples seen. Accuracy:0.9432 Error: 0.10833 Loss:0.07414 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.67s\n", - "1317344 Examples seen. Accuracy:0.9427 Error: 0.10232 Loss:0.07334 Threads: 8 Forward time: 5.01s Backward time: 3.19s Step time: 3.70s\n", - "1317984 Examples seen. Accuracy:0.9431 Error: 0.17990 Loss:0.16969 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.67s\n", - "1318624 Examples seen. 
Accuracy:0.9424 Error: 0.14679 Loss:0.18408 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.65s\n", - "1319264 Examples seen. Accuracy:0.9435 Error: 0.17855 Loss:0.23966 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.67s\n", - "1319904 Examples seen. Accuracy:0.9427 Error: 0.07152 Loss:0.04362 Threads: 8 Forward time: 4.91s Backward time: 3.21s Step time: 3.68s\n", - "1320544 Examples seen. Accuracy:0.9438 Error: 0.08435 Loss:0.11088 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.68s\n", - "1321184 Examples seen. Accuracy:0.9440 Error: 0.08809 Loss:0.15140 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.72s\n", - "1321824 Examples seen. Accuracy:0.9433 Error: 0.15179 Loss:0.10814 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.65s\n", - "1322464 Examples seen. Accuracy:0.9432 Error: 0.06831 Loss:0.04797 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.72s\n", - "1323104 Examples seen. Accuracy:0.9425 Error: 0.20406 Loss:0.26747 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.62s\n", - "1323744 Examples seen. Accuracy:0.9442 Error: 0.09283 Loss:0.09767 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.67s\n", - "1324384 Examples seen. Accuracy:0.9441 Error: 0.15506 Loss:0.17069 Threads: 8 Forward time: 4.91s Backward time: 3.25s Step time: 3.63s\n", - "1325024 Examples seen. Accuracy:0.9442 Error: 0.11763 Loss:0.18047 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.66s\n", - "1325664 Examples seen. Accuracy:0.9445 Error: 0.18907 Loss:0.15231 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.64s\n", - "1326304 Examples seen. Accuracy:0.9443 Error: 0.22853 Loss:0.24156 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "1326944 Examples seen. 
Accuracy:0.9451 Error: 0.09871 Loss:0.08006 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.66s\n", - "1327584 Examples seen. Accuracy:0.9453 Error: 0.17748 Loss:0.16216 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "1328224 Examples seen. Accuracy:0.9455 Error: 0.14607 Loss:0.13312 Threads: 8 Forward time: 5.16s Backward time: 3.37s Step time: 4.33s\n", - "1328864 Examples seen. Accuracy:0.9457 Error: 0.21569 Loss:0.18745 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 4.59s\n", - "1329504 Examples seen. Accuracy:0.9466 Error: 0.19779 Loss:0.33058 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.69s\n", - "1330144 Examples seen. Accuracy:0.9474 Error: 0.18095 Loss:0.21181 Threads: 8 Forward time: 4.95s Backward time: 3.19s Step time: 3.65s\n", - "1330784 Examples seen. Accuracy:0.9472 Error: 0.10384 Loss:0.09726 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.70s\n", - "1331424 Examples seen. Accuracy:0.9459 Error: 0.14583 Loss:0.15175 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.69s\n", - "1332064 Examples seen. Accuracy:0.9461 Error: 0.08800 Loss:0.08541 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.72s\n", - "1332704 Examples seen. Accuracy:0.9463 Error: 0.17635 Loss:0.18454 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.73s\n", - "1333344 Examples seen. Accuracy:0.9469 Error: 0.08177 Loss:0.05372 Threads: 8 Forward time: 4.93s Backward time: 3.20s Step time: 3.77s\n", - "1333984 Examples seen. Accuracy:0.9468 Error: 0.09804 Loss:0.10768 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.67s\n", - "1334624 Examples seen. Accuracy:0.9465 Error: 0.06988 Loss:0.04379 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.67s\n", - "1335264 Examples seen. 
Accuracy:0.9475 Error: 0.21247 Loss:0.25398 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.70s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1335904 Examples seen. Accuracy:0.9469 Error: 0.28866 Loss:0.31043 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.73s\n", - "1336544 Examples seen. Accuracy:0.9457 Error: 0.10323 Loss:0.09532 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.72s\n", - "1337184 Examples seen. Accuracy:0.9445 Error: 0.12273 Loss:0.09408 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.69s\n", - "1337824 Examples seen. Accuracy:0.9440 Error: 0.07332 Loss:0.05458 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.73s\n", - "1338464 Examples seen. Accuracy:0.9445 Error: 0.12766 Loss:0.13382 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.73s\n", - "1339104 Examples seen. Accuracy:0.9440 Error: 0.12421 Loss:0.10450 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.71s\n", - "1339744 Examples seen. Accuracy:0.9439 Error: 0.16706 Loss:0.13370 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.71s\n", - "1340384 Examples seen. Accuracy:0.9418 Error: 0.11334 Loss:0.17669 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.68s\n", - "1341024 Examples seen. Accuracy:0.9425 Error: 0.18791 Loss:0.17847 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.72s\n", - "1341664 Examples seen. Accuracy:0.9428 Error: 0.17789 Loss:0.15379 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.71s\n", - "1342304 Examples seen. Accuracy:0.9439 Error: 0.12433 Loss:0.07759 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.71s\n", - "1342944 Examples seen. Accuracy:0.9446 Error: 0.08810 Loss:0.05920 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.74s\n", - "1343584 Examples seen. 
Accuracy:0.9461 Error: 0.05596 Loss:0.04700 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.73s\n", - "1344224 Examples seen. Accuracy:0.9465 Error: 0.05306 Loss:0.04473 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.71s\n", - "1344864 Examples seen. Accuracy:0.9470 Error: 0.16632 Loss:0.15710 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.74s\n", - "1345504 Examples seen. Accuracy:0.9480 Error: 0.10568 Loss:0.10736 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 4.26s\n", - "1346144 Examples seen. Accuracy:0.9491 Error: 0.08046 Loss:0.05307 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.66s\n", - "1346784 Examples seen. Accuracy:0.9491 Error: 0.10946 Loss:0.08960 Threads: 8 Forward time: 4.90s Backward time: 3.22s Step time: 3.64s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 27 Examples seen:1347408 Validation Accuracy: 0.9826 Validation Error: 0.0565 Validation Loss: 0.0631 Total time: 147.65min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 27. Working time: 2.46 hours.\n", - "1348048 Examples seen. Accuracy:0.9509 Error: 0.10121 Loss:0.10480 Threads: 8 Forward time: 5.04s Backward time: 3.27s Step time: 3.83s\n", - "1348688 Examples seen. Accuracy:0.9502 Error: 0.13786 Loss:0.12307 Threads: 8 Forward time: 4.89s Backward time: 3.23s Step time: 3.63s\n", - "1349328 Examples seen. Accuracy:0.9502 Error: 0.10861 Loss:0.11271 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.63s\n", - "1349968 Examples seen. Accuracy:0.9512 Error: 0.16366 Loss:0.15103 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.65s\n", - "1350608 Examples seen. Accuracy:0.9519 Error: 0.08901 Loss:0.08968 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.64s\n", - "1351248 Examples seen. 
Accuracy:0.9507 Error: 0.08075 Loss:0.06334 Threads: 8 Forward time: 5.03s Backward time: 3.31s Step time: 3.67s\n", - "1351888 Examples seen. Accuracy:0.9489 Error: 0.23113 Loss:0.33942 Threads: 8 Forward time: 5.11s Backward time: 3.26s Step time: 3.68s\n", - "1352528 Examples seen. Accuracy:0.9493 Error: 0.10107 Loss:0.14827 Threads: 8 Forward time: 4.92s Backward time: 3.22s Step time: 3.63s\n", - "1353168 Examples seen. Accuracy:0.9500 Error: 0.15232 Loss:0.19826 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.66s\n", - "1353808 Examples seen. Accuracy:0.9490 Error: 0.06446 Loss:0.08063 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.62s\n", - "1354448 Examples seen. Accuracy:0.9506 Error: 0.09203 Loss:0.05924 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.59s\n", - "1355088 Examples seen. Accuracy:0.9495 Error: 0.01877 Loss:0.00969 Threads: 8 Forward time: 4.91s Backward time: 3.21s Step time: 3.58s\n", - "1355728 Examples seen. Accuracy:0.9483 Error: 0.18560 Loss:0.19859 Threads: 8 Forward time: 4.92s Backward time: 3.20s Step time: 3.60s\n", - "1356368 Examples seen. Accuracy:0.9479 Error: 0.16790 Loss:0.18150 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.61s\n", - "1357008 Examples seen. Accuracy:0.9484 Error: 0.15205 Loss:0.13063 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.62s\n", - "1357648 Examples seen. Accuracy:0.9464 Error: 0.15391 Loss:0.17763 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "1358288 Examples seen. Accuracy:0.9462 Error: 0.14777 Loss:0.18169 Threads: 8 Forward time: 4.93s Backward time: 3.21s Step time: 3.61s\n", - "1358928 Examples seen. Accuracy:0.9464 Error: 0.15986 Loss:0.15231 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.59s\n", - "1359568 Examples seen. 
Accuracy:0.9470 Error: 0.09436 Loss:0.12597 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.59s\n", - "1360208 Examples seen. Accuracy:0.9468 Error: 0.17975 Loss:0.14399 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.59s\n", - "1360848 Examples seen. Accuracy:0.9472 Error: 0.08552 Loss:0.06134 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.59s\n", - "1361488 Examples seen. Accuracy:0.9472 Error: 0.18226 Loss:0.25142 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.68s\n", - "1362128 Examples seen. Accuracy:0.9486 Error: 0.09666 Loss:0.08678 Threads: 8 Forward time: 5.14s Backward time: 3.27s Step time: 3.66s\n", - "1362768 Examples seen. Accuracy:0.9497 Error: 0.13249 Loss:0.10215 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.68s\n", - "1363408 Examples seen. Accuracy:0.9482 Error: 0.08814 Loss:0.10143 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.64s\n", - "1364048 Examples seen. Accuracy:0.9471 Error: 0.22412 Loss:0.25013 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.74s\n", - "1364688 Examples seen. Accuracy:0.9472 Error: 0.14540 Loss:0.16838 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.72s\n", - "1365328 Examples seen. Accuracy:0.9473 Error: 0.09956 Loss:0.06736 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.67s\n", - "1365968 Examples seen. Accuracy:0.9475 Error: 0.09321 Loss:0.10851 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "1366608 Examples seen. Accuracy:0.9488 Error: 0.04632 Loss:0.02714 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.67s\n", - "1367248 Examples seen. Accuracy:0.9468 Error: 0.11566 Loss:0.09503 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.65s\n", - "1367888 Examples seen. 
Accuracy:0.9492 Error: 0.10286 Loss:0.08778 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.66s\n", - "1368528 Examples seen. Accuracy:0.9494 Error: 0.09261 Loss:0.16489 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.66s\n", - "1369168 Examples seen. Accuracy:0.9497 Error: 0.10361 Loss:0.08744 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.66s\n", - "1369808 Examples seen. Accuracy:0.9503 Error: 0.05990 Loss:0.07499 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.65s\n", - "1370448 Examples seen. Accuracy:0.9493 Error: 0.19406 Loss:0.33821 Threads: 8 Forward time: 5.16s Backward time: 3.34s Step time: 4.19s\n", - "1371088 Examples seen. Accuracy:0.9496 Error: 0.16123 Loss:0.14536 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.62s\n", - "1371728 Examples seen. Accuracy:0.9494 Error: 0.14936 Loss:0.16657 Threads: 8 Forward time: 4.90s Backward time: 3.27s Step time: 3.60s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1372368 Examples seen. Accuracy:0.9496 Error: 0.11550 Loss:0.09744 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.61s\n", - "1373008 Examples seen. Accuracy:0.9500 Error: 0.13668 Loss:0.13997 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.60s\n", - "1373648 Examples seen. Accuracy:0.9494 Error: 0.13516 Loss:0.15512 Threads: 8 Forward time: 4.91s Backward time: 3.22s Step time: 3.63s\n", - "1374288 Examples seen. Accuracy:0.9494 Error: 0.13733 Loss:0.17051 Threads: 8 Forward time: 4.91s Backward time: 3.30s Step time: 3.60s\n", - "1374928 Examples seen. Accuracy:0.9490 Error: 0.13899 Loss:0.12188 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.60s\n", - "1375568 Examples seen. Accuracy:0.9481 Error: 0.14727 Loss:0.15195 Threads: 8 Forward time: 4.91s Backward time: 3.30s Step time: 3.62s\n", - "1376208 Examples seen. 
Accuracy:0.9463 Error: 0.21523 Loss:0.21157 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.64s\n", - "1376848 Examples seen. Accuracy:0.9460 Error: 0.11469 Loss:0.11323 Threads: 8 Forward time: 5.06s Backward time: 3.31s Step time: 3.63s\n", - "1377488 Examples seen. Accuracy:0.9453 Error: 0.11389 Loss:0.10801 Threads: 8 Forward time: 4.90s Backward time: 3.25s Step time: 3.61s\n", - "1378128 Examples seen. Accuracy:0.9443 Error: 0.17951 Loss:0.18461 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.61s\n", - "1378768 Examples seen. Accuracy:0.9446 Error: 0.11869 Loss:0.10371 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.64s\n", - "1379408 Examples seen. Accuracy:0.9449 Error: 0.13683 Loss:0.10894 Threads: 8 Forward time: 4.91s Backward time: 3.24s Step time: 3.61s\n", - "1380048 Examples seen. Accuracy:0.9462 Error: 0.18831 Loss:0.17881 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.59s\n", - "1380688 Examples seen. Accuracy:0.9457 Error: 0.14097 Loss:0.22286 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.62s\n", - "1381328 Examples seen. Accuracy:0.9456 Error: 0.14346 Loss:0.13224 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.61s\n", - "1381968 Examples seen. Accuracy:0.9474 Error: 0.12830 Loss:0.09888 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.61s\n", - "1382608 Examples seen. Accuracy:0.9472 Error: 0.31545 Loss:0.38050 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.61s\n", - "1383248 Examples seen. Accuracy:0.9477 Error: 0.08129 Loss:0.05224 Threads: 8 Forward time: 4.90s Backward time: 3.26s Step time: 3.62s\n", - "1383888 Examples seen. Accuracy:0.9474 Error: 0.11140 Loss:0.08422 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.61s\n", - "1384528 Examples seen. 
Accuracy:0.9464 Error: 0.13676 Loss:0.10637 Threads: 8 Forward time: 4.90s Backward time: 3.28s Step time: 3.61s\n", - "1385168 Examples seen. Accuracy:0.9463 Error: 0.07621 Loss:0.07987 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.60s\n", - "1385808 Examples seen. Accuracy:0.9484 Error: 0.09249 Loss:0.07141 Threads: 8 Forward time: 4.91s Backward time: 3.24s Step time: 3.61s\n", - "1386448 Examples seen. Accuracy:0.9484 Error: 0.21104 Loss:0.25385 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.60s\n", - "1387088 Examples seen. Accuracy:0.9495 Error: 0.12764 Loss:0.14795 Threads: 8 Forward time: 4.91s Backward time: 3.26s Step time: 3.60s\n", - "1387728 Examples seen. Accuracy:0.9509 Error: 0.11365 Loss:0.09538 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.61s\n", - "1388368 Examples seen. Accuracy:0.9513 Error: 0.09333 Loss:0.14052 Threads: 8 Forward time: 4.91s Backward time: 3.19s Step time: 3.60s\n", - "1389008 Examples seen. Accuracy:0.9522 Error: 0.13965 Loss:0.10517 Threads: 8 Forward time: 4.90s Backward time: 3.23s Step time: 3.65s\n", - "1389648 Examples seen. Accuracy:0.9524 Error: 0.15130 Loss:0.26410 Threads: 8 Forward time: 4.90s Backward time: 3.23s Step time: 3.60s\n", - "1390288 Examples seen. Accuracy:0.9521 Error: 0.07675 Loss:0.05048 Threads: 8 Forward time: 4.90s Backward time: 3.23s Step time: 3.60s\n", - "1390928 Examples seen. Accuracy:0.9518 Error: 0.11075 Loss:0.12370 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.62s\n", - "1391568 Examples seen. Accuracy:0.9516 Error: 0.15065 Loss:0.11448 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.61s\n", - "1392208 Examples seen. Accuracy:0.9522 Error: 0.09014 Loss:0.05841 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.61s\n", - "1392848 Examples seen. 
Accuracy:0.9530 Error: 0.09892 Loss:0.07473 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.66s\n", - "1393488 Examples seen. Accuracy:0.9538 Error: 0.07609 Loss:0.06965 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.63s\n", - "1394128 Examples seen. Accuracy:0.9523 Error: 0.02262 Loss:0.01602 Threads: 8 Forward time: 4.93s Backward time: 3.27s Step time: 3.63s\n", - "1394768 Examples seen. Accuracy:0.9522 Error: 0.14675 Loss:0.15687 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.65s\n", - "1395408 Examples seen. Accuracy:0.9526 Error: 0.03480 Loss:0.01913 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.68s\n", - "1396048 Examples seen. Accuracy:0.9529 Error: 0.08270 Loss:0.09780 Threads: 8 Forward time: 4.91s Backward time: 3.25s Step time: 3.61s\n", - "1396688 Examples seen. Accuracy:0.9527 Error: 0.19413 Loss:0.28563 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.63s\n", - "Starting Validation.\n", - "Epochs: 28 Examples seen:1397312 Validation Accuracy: 0.9826 Validation Error: 0.0543 Validation Loss: 0.0600 Total time: 152.85min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 28. Working time: 2.55 hours.\n", - "1397952 Examples seen. Accuracy:0.9520 Error: 0.14071 Loss:0.11459 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.64s\n", - "1398592 Examples seen. Accuracy:0.9489 Error: 0.18503 Loss:0.28415 Threads: 8 Forward time: 4.92s Backward time: 3.22s Step time: 3.66s\n", - "1399232 Examples seen. Accuracy:0.9494 Error: 0.10677 Loss:0.11210 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.65s\n", - "1399872 Examples seen. Accuracy:0.9483 Error: 0.14365 Loss:0.11966 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.64s\n", - "1400512 Examples seen. Accuracy:0.9480 Error: 0.11535 Loss:0.11886 Threads: 8 Forward time: 4.95s Backward time: 3.19s Step time: 3.64s\n", - "1401152 Examples seen. 
Accuracy:0.9464 Error: 0.20490 Loss:0.33197 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.64s\n", - "1401792 Examples seen. Accuracy:0.9462 Error: 0.13872 Loss:0.09682 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.66s\n", - "1402432 Examples seen. Accuracy:0.9467 Error: 0.19441 Loss:0.22473 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.68s\n", - "1403072 Examples seen. Accuracy:0.9482 Error: 0.07202 Loss:0.07695 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.74s\n", - "1403712 Examples seen. Accuracy:0.9488 Error: 0.10965 Loss:0.12676 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.63s\n", - "1404352 Examples seen. Accuracy:0.9493 Error: 0.14986 Loss:0.12941 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.65s\n", - "1404992 Examples seen. Accuracy:0.9482 Error: 0.13769 Loss:0.13061 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.69s\n", - "1405632 Examples seen. Accuracy:0.9493 Error: 0.19323 Loss:0.18651 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.71s\n", - "1406272 Examples seen. Accuracy:0.9489 Error: 0.09498 Loss:0.16309 Threads: 8 Forward time: 4.90s Backward time: 3.23s Step time: 3.61s\n", - "1406912 Examples seen. Accuracy:0.9487 Error: 0.13741 Loss:0.10482 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.59s\n", - "1407552 Examples seen. Accuracy:0.9484 Error: 0.07133 Loss:0.04875 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.59s\n", - "1408192 Examples seen. Accuracy:0.9475 Error: 0.17482 Loss:0.14570 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.59s\n", - "1408832 Examples seen. Accuracy:0.9472 Error: 0.10073 Loss:0.06810 Threads: 8 Forward time: 4.94s Backward time: 3.19s Step time: 3.60s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1409472 Examples seen. 
Accuracy:0.9469 Error: 0.08732 Loss:0.06722 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.61s\n", - "1410112 Examples seen. Accuracy:0.9463 Error: 0.18115 Loss:0.15862 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.62s\n", - "1410752 Examples seen. Accuracy:0.9454 Error: 0.20933 Loss:0.25905 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.61s\n", - "1411392 Examples seen. Accuracy:0.9454 Error: 0.05363 Loss:0.05363 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.61s\n", - "1412032 Examples seen. Accuracy:0.9451 Error: 0.11578 Loss:0.12851 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.61s\n", - "1412672 Examples seen. Accuracy:0.9455 Error: 0.10642 Loss:0.07930 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.62s\n", - "1413312 Examples seen. Accuracy:0.9467 Error: 0.10118 Loss:0.11019 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.64s\n", - "1413952 Examples seen. Accuracy:0.9474 Error: 0.14785 Loss:0.11550 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.63s\n", - "1414592 Examples seen. Accuracy:0.9470 Error: 0.18992 Loss:0.22856 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.63s\n", - "1415232 Examples seen. Accuracy:0.9477 Error: 0.15307 Loss:0.27508 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.67s\n", - "1415872 Examples seen. Accuracy:0.9472 Error: 0.07310 Loss:0.06484 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.65s\n", - "1416512 Examples seen. Accuracy:0.9468 Error: 0.12746 Loss:0.12694 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.68s\n", - "1417152 Examples seen. Accuracy:0.9467 Error: 0.11046 Loss:0.12320 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.62s\n", - "1417792 Examples seen. 
Accuracy:0.9461 Error: 0.18200 Loss:0.19451 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.62s\n", - "1418432 Examples seen. Accuracy:0.9461 Error: 0.20302 Loss:0.19527 Threads: 8 Forward time: 5.08s Backward time: 3.21s Step time: 3.64s\n", - "1419072 Examples seen. Accuracy:0.9456 Error: 0.18755 Loss:0.27303 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.62s\n", - "1419712 Examples seen. Accuracy:0.9456 Error: 0.14689 Loss:0.13639 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.62s\n", - "1420352 Examples seen. Accuracy:0.9473 Error: 0.06867 Loss:0.04247 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.63s\n", - "1420992 Examples seen. Accuracy:0.9483 Error: 0.07056 Loss:0.04423 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.62s\n", - "1421632 Examples seen. Accuracy:0.9484 Error: 0.05687 Loss:0.03613 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.61s\n", - "1422272 Examples seen. Accuracy:0.9478 Error: 0.08224 Loss:0.10699 Threads: 8 Forward time: 4.92s Backward time: 3.22s Step time: 3.62s\n", - "1422912 Examples seen. Accuracy:0.9478 Error: 0.10988 Loss:0.08428 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.62s\n", - "1423552 Examples seen. Accuracy:0.9488 Error: 0.15950 Loss:0.22997 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.61s\n", - "1424192 Examples seen. Accuracy:0.9469 Error: 0.13396 Loss:0.12097 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.68s\n", - "1424832 Examples seen. Accuracy:0.9474 Error: 0.08122 Loss:0.06638 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.68s\n", - "1425472 Examples seen. Accuracy:0.9474 Error: 0.14775 Loss:0.12325 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.69s\n", - "1426112 Examples seen. 
Accuracy:0.9481 Error: 0.10931 Loss:0.12067 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.68s\n", - "1426752 Examples seen. Accuracy:0.9485 Error: 0.25497 Loss:0.54245 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 4.25s\n", - "1427392 Examples seen. Accuracy:0.9483 Error: 0.09018 Loss:0.08529 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.61s\n", - "1428032 Examples seen. Accuracy:0.9482 Error: 0.14518 Loss:0.09340 Threads: 8 Forward time: 4.88s Backward time: 3.23s Step time: 3.59s\n", - "1428672 Examples seen. Accuracy:0.9478 Error: 0.12913 Loss:0.19537 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.62s\n", - "1429312 Examples seen. Accuracy:0.9484 Error: 0.11393 Loss:0.09061 Threads: 8 Forward time: 4.90s Backward time: 3.24s Step time: 3.60s\n", - "1429952 Examples seen. Accuracy:0.9480 Error: 0.16260 Loss:0.14750 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.62s\n", - "1430592 Examples seen. Accuracy:0.9479 Error: 0.12916 Loss:0.17194 Threads: 8 Forward time: 4.91s Backward time: 3.28s Step time: 3.63s\n", - "1431232 Examples seen. Accuracy:0.9469 Error: 0.12566 Loss:0.18932 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.61s\n", - "1431872 Examples seen. Accuracy:0.9471 Error: 0.27435 Loss:0.37185 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.60s\n", - "1432512 Examples seen. Accuracy:0.9462 Error: 0.11519 Loss:0.09288 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.61s\n", - "1433152 Examples seen. Accuracy:0.9463 Error: 0.07026 Loss:0.05649 Threads: 8 Forward time: 4.90s Backward time: 3.25s Step time: 3.61s\n", - "1433792 Examples seen. Accuracy:0.9472 Error: 0.09960 Loss:0.06100 Threads: 8 Forward time: 4.90s Backward time: 3.25s Step time: 3.60s\n", - "1434432 Examples seen. 
Accuracy:0.9460 Error: 0.14787 Loss:0.23870 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.62s\n", - "1435072 Examples seen. Accuracy:0.9454 Error: 0.06363 Loss:0.07042 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.69s\n", - "1435712 Examples seen. Accuracy:0.9448 Error: 0.15126 Loss:0.17529 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.65s\n", - "1436352 Examples seen. Accuracy:0.9444 Error: 0.25060 Loss:0.33553 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.71s\n", - "1436992 Examples seen. Accuracy:0.9443 Error: 0.13863 Loss:0.15685 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.70s\n", - "1437632 Examples seen. Accuracy:0.9451 Error: 0.08611 Loss:0.05561 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.73s\n", - "1438272 Examples seen. Accuracy:0.9462 Error: 0.18388 Loss:0.13938 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.68s\n", - "1438912 Examples seen. Accuracy:0.9458 Error: 0.08707 Loss:0.06957 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.69s\n", - "1439552 Examples seen. Accuracy:0.9445 Error: 0.13905 Loss:0.15246 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.66s\n", - "1440192 Examples seen. Accuracy:0.9454 Error: 0.16139 Loss:0.13547 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.67s\n", - "1440832 Examples seen. Accuracy:0.9453 Error: 0.12961 Loss:0.18257 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.66s\n", - "1441472 Examples seen. Accuracy:0.9463 Error: 0.14758 Loss:0.16493 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.66s\n", - "1442112 Examples seen. Accuracy:0.9462 Error: 0.21257 Loss:0.17336 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.66s\n", - "1442752 Examples seen. 
Accuracy:0.9457 Error: 0.10177 Loss:0.06311 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.69s\n", - "1443392 Examples seen. Accuracy:0.9446 Error: 0.22161 Loss:0.39676 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.69s\n", - "1444032 Examples seen. Accuracy:0.9436 Error: 0.07667 Loss:0.04998 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.68s\n", - "1444672 Examples seen. Accuracy:0.9416 Error: 0.20379 Loss:0.29059 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.67s\n", - "1445312 Examples seen. Accuracy:0.9425 Error: 0.14832 Loss:0.13608 Threads: 8 Forward time: 4.93s Backward time: 3.21s Step time: 3.67s\n", - "1445952 Examples seen. Accuracy:0.9431 Error: 0.22307 Loss:0.21920 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.65s\n", - "1446592 Examples seen. Accuracy:0.9458 Error: 0.09392 Loss:0.06083 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.66s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Starting Validation.\n", - "Epochs: 29 Examples seen:1447216 Validation Accuracy: 0.9815 Validation Error: 0.0539 Validation Loss: 0.0588 Total time: 158.07min\n", - "Epoch time: 4.8 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 29. Working time: 2.63 hours.\n", - "1447856 Examples seen. Accuracy:0.9464 Error: 0.10081 Loss:0.12748 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.67s\n", - "1448496 Examples seen. Accuracy:0.9473 Error: 0.10701 Loss:0.08665 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.70s\n", - "1449136 Examples seen. Accuracy:0.9456 Error: 0.10133 Loss:0.12105 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.69s\n", - "1449776 Examples seen. Accuracy:0.9455 Error: 0.06103 Loss:0.04085 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.71s\n", - "1450416 Examples seen. 
Accuracy:0.9461 Error: 0.11119 Loss:0.08990 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.72s\n", - "1451056 Examples seen. Accuracy:0.9468 Error: 0.24257 Loss:0.30252 Threads: 8 Forward time: 5.13s Backward time: 3.30s Step time: 4.23s\n", - "1451696 Examples seen. Accuracy:0.9482 Error: 0.15170 Loss:0.12253 Threads: 8 Forward time: 5.17s Backward time: 3.30s Step time: 3.84s\n", - "1452336 Examples seen. Accuracy:0.9468 Error: 0.16842 Loss:0.19528 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 4.16s\n", - "1452976 Examples seen. Accuracy:0.9476 Error: 0.04499 Loss:0.04408 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.64s\n", - "1453616 Examples seen. Accuracy:0.9477 Error: 0.16040 Loss:0.22434 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.59s\n", - "1454256 Examples seen. Accuracy:0.9474 Error: 0.06507 Loss:0.04274 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.61s\n", - "1454896 Examples seen. Accuracy:0.9471 Error: 0.12018 Loss:0.08207 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.61s\n", - "1455536 Examples seen. Accuracy:0.9457 Error: 0.12105 Loss:0.13171 Threads: 8 Forward time: 5.07s Backward time: 3.25s Step time: 3.66s\n", - "1456176 Examples seen. Accuracy:0.9463 Error: 0.18114 Loss:0.15306 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.67s\n", - "1456816 Examples seen. Accuracy:0.9472 Error: 0.06513 Loss:0.03691 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.64s\n", - "1457456 Examples seen. Accuracy:0.9475 Error: 0.18867 Loss:0.19671 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.65s\n", - "1458096 Examples seen. Accuracy:0.9487 Error: 0.12882 Loss:0.13529 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.63s\n", - "1458736 Examples seen. 
Accuracy:0.9480 Error: 0.14104 Loss:0.21133 Threads: 8 Forward time: 5.05s Backward time: 3.21s Step time: 3.64s\n", - "1459376 Examples seen. Accuracy:0.9478 Error: 0.12597 Loss:0.11772 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.63s\n", - "1460016 Examples seen. Accuracy:0.9481 Error: 0.17798 Loss:0.20918 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.65s\n", - "1460656 Examples seen. Accuracy:0.9491 Error: 0.05437 Loss:0.04056 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.65s\n", - "1461296 Examples seen. Accuracy:0.9503 Error: 0.09656 Loss:0.15096 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.72s\n", - "1461936 Examples seen. Accuracy:0.9510 Error: 0.05769 Loss:0.04776 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.64s\n", - "1462576 Examples seen. Accuracy:0.9500 Error: 0.14814 Loss:0.16030 Threads: 8 Forward time: 5.16s Backward time: 3.29s Step time: 3.72s\n", - "1463216 Examples seen. Accuracy:0.9505 Error: 0.06782 Loss:0.06567 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.62s\n", - "1463856 Examples seen. Accuracy:0.9514 Error: 0.18103 Loss:0.18898 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.68s\n", - "1464496 Examples seen. Accuracy:0.9517 Error: 0.17383 Loss:0.19871 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.65s\n", - "1465136 Examples seen. Accuracy:0.9509 Error: 0.13407 Loss:0.11429 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.64s\n", - "1465776 Examples seen. Accuracy:0.9512 Error: 0.14041 Loss:0.14374 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.64s\n", - "1466416 Examples seen. Accuracy:0.9507 Error: 0.09041 Loss:0.08903 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.61s\n", - "1467056 Examples seen. 
Accuracy:0.9504 Error: 0.13551 Loss:0.10703 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.63s\n", - "1467696 Examples seen. Accuracy:0.9486 Error: 0.10316 Loss:0.09404 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.66s\n", - "1468336 Examples seen. Accuracy:0.9488 Error: 0.12375 Loss:0.10232 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.76s\n", - "1468976 Examples seen. Accuracy:0.9496 Error: 0.15481 Loss:0.16754 Threads: 8 Forward time: 5.11s Backward time: 3.27s Step time: 3.69s\n", - "1469616 Examples seen. Accuracy:0.9485 Error: 0.17036 Loss:0.18838 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.67s\n", - "1470256 Examples seen. Accuracy:0.9483 Error: 0.09654 Loss:0.10328 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.66s\n", - "1470896 Examples seen. Accuracy:0.9483 Error: 0.04706 Loss:0.02561 Threads: 8 Forward time: 5.05s Backward time: 3.30s Step time: 3.72s\n", - "1471536 Examples seen. Accuracy:0.9489 Error: 0.08310 Loss:0.05426 Threads: 8 Forward time: 5.06s Backward time: 3.28s Step time: 3.69s\n", - "1472176 Examples seen. Accuracy:0.9496 Error: 0.07609 Loss:0.04889 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.76s\n", - "1472816 Examples seen. Accuracy:0.9498 Error: 0.14653 Loss:0.18602 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.79s\n", - "1473456 Examples seen. Accuracy:0.9501 Error: 0.09466 Loss:0.06107 Threads: 8 Forward time: 5.03s Backward time: 3.21s Step time: 3.73s\n", - "1474096 Examples seen. Accuracy:0.9496 Error: 0.12114 Loss:0.12567 Threads: 8 Forward time: 5.09s Backward time: 3.25s Step time: 3.87s\n", - "1474736 Examples seen. Accuracy:0.9500 Error: 0.08347 Loss:0.05218 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.76s\n", - "1475376 Examples seen. 
Accuracy:0.9511 Error: 0.05955 Loss:0.04213 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.69s\n", - "1476016 Examples seen. Accuracy:0.9489 Error: 0.20382 Loss:0.19270 Threads: 8 Forward time: 5.06s Backward time: 3.24s Step time: 3.72s\n", - "1476656 Examples seen. Accuracy:0.9478 Error: 0.15078 Loss:0.14858 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.69s\n", - "1477296 Examples seen. Accuracy:0.9471 Error: 0.08690 Loss:0.08119 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.68s\n", - "1477936 Examples seen. Accuracy:0.9481 Error: 0.09349 Loss:0.10810 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.70s\n", - "1478576 Examples seen. Accuracy:0.9484 Error: 0.18310 Loss:0.22332 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.69s\n", - "1479216 Examples seen. Accuracy:0.9488 Error: 0.16354 Loss:0.16635 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.67s\n", - "1479856 Examples seen. Accuracy:0.9489 Error: 0.13712 Loss:0.15687 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.71s\n", - "1480496 Examples seen. Accuracy:0.9490 Error: 0.16396 Loss:0.14134 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.71s\n", - "1481136 Examples seen. Accuracy:0.9486 Error: 0.12897 Loss:0.09610 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.67s\n", - "1481776 Examples seen. Accuracy:0.9491 Error: 0.09852 Loss:0.17474 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.72s\n", - "1482416 Examples seen. Accuracy:0.9493 Error: 0.18206 Loss:0.18885 Threads: 8 Forward time: 4.95s Backward time: 3.17s Step time: 3.69s\n", - "1483056 Examples seen. Accuracy:0.9497 Error: 0.10386 Loss:0.08507 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.68s\n", - "1483696 Examples seen. 
Accuracy:0.9498 Error: 0.15285 Loss:0.11083 Threads: 8 Forward time: 5.03s Backward time: 3.20s Step time: 3.66s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1484336 Examples seen. Accuracy:0.9496 Error: 0.15772 Loss:0.18276 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.69s\n", - "1484976 Examples seen. Accuracy:0.9484 Error: 0.13115 Loss:0.14634 Threads: 8 Forward time: 5.07s Backward time: 3.23s Step time: 3.72s\n", - "1485616 Examples seen. Accuracy:0.9497 Error: 0.10859 Loss:0.09835 Threads: 8 Forward time: 4.97s Backward time: 3.18s Step time: 3.69s\n", - "1486256 Examples seen. Accuracy:0.9500 Error: 0.09859 Loss:0.14146 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.65s\n", - "1486896 Examples seen. Accuracy:0.9494 Error: 0.14755 Loss:0.17385 Threads: 8 Forward time: 5.09s Backward time: 3.26s Step time: 3.70s\n", - "1487536 Examples seen. Accuracy:0.9506 Error: 0.07704 Loss:0.06560 Threads: 8 Forward time: 4.99s Backward time: 3.19s Step time: 3.72s\n", - "1488176 Examples seen. Accuracy:0.9512 Error: 0.14551 Loss:0.16851 Threads: 8 Forward time: 5.08s Backward time: 3.26s Step time: 3.70s\n", - "1488816 Examples seen. Accuracy:0.9512 Error: 0.14966 Loss:0.16631 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.71s\n", - "1489456 Examples seen. Accuracy:0.9504 Error: 0.16662 Loss:0.22732 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.69s\n", - "1490096 Examples seen. Accuracy:0.9502 Error: 0.12427 Loss:0.12262 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.71s\n", - "1490736 Examples seen. Accuracy:0.9507 Error: 0.06290 Loss:0.03997 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.70s\n", - "1491376 Examples seen. Accuracy:0.9486 Error: 0.16768 Loss:0.19408 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.71s\n", - "1492016 Examples seen. 
Accuracy:0.9496 Error: 0.13291 Loss:0.09654 Threads: 8 Forward time: 5.10s Backward time: 3.26s Step time: 3.74s\n", - "1492656 Examples seen. Accuracy:0.9496 Error: 0.11797 Loss:0.15322 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.69s\n", - "1493296 Examples seen. Accuracy:0.9500 Error: 0.10390 Loss:0.12544 Threads: 8 Forward time: 5.05s Backward time: 3.26s Step time: 3.72s\n", - "1493936 Examples seen. Accuracy:0.9510 Error: 0.13868 Loss:0.15337 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.76s\n", - "1494576 Examples seen. Accuracy:0.9507 Error: 0.13635 Loss:0.13890 Threads: 8 Forward time: 5.11s Backward time: 3.31s Step time: 3.88s\n", - "1495216 Examples seen. Accuracy:0.9507 Error: 0.22470 Loss:0.30595 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.81s\n", - "1495856 Examples seen. Accuracy:0.9505 Error: 0.14878 Loss:0.16159 Threads: 8 Forward time: 5.05s Backward time: 3.27s Step time: 3.78s\n", - "1496496 Examples seen. Accuracy:0.9508 Error: 0.26502 Loss:0.46716 Threads: 8 Forward time: 5.10s Backward time: 3.29s Step time: 3.77s\n", - "Starting Validation.\n", - "Epochs: 30 Examples seen:1497120 Validation Accuracy: 0.9819 Validation Error: 0.0523 Validation Loss: 0.0569 Total time: 163.36min\n", - "Starting Testing.\n", - "Epochs: 30 Examples seen:1497120 Test Accuracy: 0.9859 Test Error: 0.0456 Test Loss: 0.0411 Total time: 163.83min\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.2 hours.\n", - "Epochs: 30. Working time: 2.73 hours.\n", - "Learning rate set to:0.00074\n", - "1497760 Examples seen. Accuracy:0.9508 Error: 0.14861 Loss:0.14557 Threads: 8 Forward time: 5.05s Backward time: 3.26s Step time: 3.90s\n", - "1498400 Examples seen. Accuracy:0.9519 Error: 0.17429 Loss:0.13326 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.66s\n", - "1499040 Examples seen. 
Accuracy:0.9517 Error: 0.17726 Loss:0.22423 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.75s\n", - "1499680 Examples seen. Accuracy:0.9513 Error: 0.06564 Loss:0.03994 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.70s\n", - "1500320 Examples seen. Accuracy:0.9519 Error: 0.11300 Loss:0.19092 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.69s\n", - "1500960 Examples seen. Accuracy:0.9528 Error: 0.06666 Loss:0.04443 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.71s\n", - "1501600 Examples seen. Accuracy:0.9532 Error: 0.14064 Loss:0.11202 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.71s\n", - "1502240 Examples seen. Accuracy:0.9532 Error: 0.10233 Loss:0.06801 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.73s\n", - "1502880 Examples seen. Accuracy:0.9534 Error: 0.14872 Loss:0.10102 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.72s\n", - "1503520 Examples seen. Accuracy:0.9539 Error: 0.21124 Loss:0.21142 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.73s\n", - "1504160 Examples seen. Accuracy:0.9539 Error: 0.21600 Loss:0.23482 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.76s\n", - "1504800 Examples seen. Accuracy:0.9539 Error: 0.12996 Loss:0.11105 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.70s\n", - "1505440 Examples seen. Accuracy:0.9532 Error: 0.13709 Loss:0.13116 Threads: 8 Forward time: 5.07s Backward time: 3.26s Step time: 3.70s\n", - "1506080 Examples seen. Accuracy:0.9527 Error: 0.20072 Loss:0.24342 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.75s\n", - "1506720 Examples seen. Accuracy:0.9532 Error: 0.20407 Loss:0.28499 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.69s\n", - "1507360 Examples seen. 
Accuracy:0.9532 Error: 0.05877 Loss:0.03551 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.71s\n", - "1508000 Examples seen. Accuracy:0.9528 Error: 0.05974 Loss:0.05044 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.75s\n", - "1508640 Examples seen. Accuracy:0.9530 Error: 0.14688 Loss:0.15558 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.71s\n", - "1509280 Examples seen. Accuracy:0.9522 Error: 0.13597 Loss:0.19620 Threads: 8 Forward time: 4.92s Backward time: 3.20s Step time: 3.70s\n", - "1509920 Examples seen. Accuracy:0.9523 Error: 0.09955 Loss:0.14736 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.70s\n", - "1510560 Examples seen. Accuracy:0.9510 Error: 0.22952 Loss:0.26378 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.77s\n", - "1511200 Examples seen. Accuracy:0.9508 Error: 0.17019 Loss:0.19236 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.68s\n", - "1511840 Examples seen. Accuracy:0.9501 Error: 0.08093 Loss:0.05472 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.68s\n", - "1512480 Examples seen. Accuracy:0.9481 Error: 0.11854 Loss:0.22845 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.77s\n", - "1513120 Examples seen. Accuracy:0.9488 Error: 0.08967 Loss:0.08184 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.70s\n", - "1513760 Examples seen. Accuracy:0.9513 Error: 0.10437 Loss:0.12372 Threads: 8 Forward time: 5.15s Backward time: 3.30s Step time: 3.73s\n", - "1514400 Examples seen. Accuracy:0.9516 Error: 0.08894 Loss:0.15115 Threads: 8 Forward time: 5.07s Backward time: 3.26s Step time: 3.77s\n", - "1515040 Examples seen. Accuracy:0.9506 Error: 0.22076 Loss:0.21993 Threads: 8 Forward time: 5.08s Backward time: 3.30s Step time: 3.73s\n", - "1515680 Examples seen. 
Accuracy:0.9501 Error: 0.12816 Loss:0.14776 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.72s\n", - "1516320 Examples seen. Accuracy:0.9498 Error: 0.18388 Loss:0.21836 Threads: 8 Forward time: 5.10s Backward time: 3.28s Step time: 3.98s\n", - "1516960 Examples seen. Accuracy:0.9495 Error: 0.12942 Loss:0.08564 Threads: 8 Forward time: 5.13s Backward time: 3.33s Step time: 3.74s\n", - "1517600 Examples seen. Accuracy:0.9483 Error: 0.14637 Loss:0.09820 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.75s\n", - "1518240 Examples seen. Accuracy:0.9492 Error: 0.11324 Loss:0.15473 Threads: 8 Forward time: 5.10s Backward time: 3.29s Step time: 3.76s\n", - "1518880 Examples seen. Accuracy:0.9509 Error: 0.08589 Loss:0.07891 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.73s\n", - "1519520 Examples seen. Accuracy:0.9502 Error: 0.12827 Loss:0.08967 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.66s\n", - "1520160 Examples seen. Accuracy:0.9501 Error: 0.12325 Loss:0.13687 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.67s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1520800 Examples seen. Accuracy:0.9503 Error: 0.18560 Loss:0.21073 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.66s\n", - "1521440 Examples seen. Accuracy:0.9515 Error: 0.13357 Loss:0.11113 Threads: 8 Forward time: 5.03s Backward time: 3.28s Step time: 3.67s\n", - "1522080 Examples seen. Accuracy:0.9521 Error: 0.02460 Loss:0.01391 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.66s\n", - "1522720 Examples seen. Accuracy:0.9517 Error: 0.10466 Loss:0.11742 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.66s\n", - "1523360 Examples seen. Accuracy:0.9524 Error: 0.08335 Loss:0.08736 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.68s\n", - "1524000 Examples seen. 
Accuracy:0.9529 Error: 0.10179 Loss:0.08754 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.65s\n", - "1524640 Examples seen. Accuracy:0.9543 Error: 0.08259 Loss:0.05290 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.66s\n", - "1525280 Examples seen. Accuracy:0.9526 Error: 0.06198 Loss:0.04723 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.67s\n", - "1525920 Examples seen. Accuracy:0.9516 Error: 0.12397 Loss:0.10570 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.66s\n", - "1526560 Examples seen. Accuracy:0.9523 Error: 0.13771 Loss:0.12439 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.70s\n", - "1527200 Examples seen. Accuracy:0.9510 Error: 0.15107 Loss:0.12061 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.68s\n", - "1527840 Examples seen. Accuracy:0.9519 Error: 0.04796 Loss:0.03480 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.67s\n", - "1528480 Examples seen. Accuracy:0.9524 Error: 0.10863 Loss:0.09759 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.68s\n", - "1529120 Examples seen. Accuracy:0.9521 Error: 0.12279 Loss:0.10436 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.64s\n", - "1529760 Examples seen. Accuracy:0.9515 Error: 0.10621 Loss:0.13907 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.65s\n", - "1530400 Examples seen. Accuracy:0.9509 Error: 0.03789 Loss:0.02090 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.67s\n", - "1531040 Examples seen. Accuracy:0.9507 Error: 0.07933 Loss:0.10439 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.67s\n", - "1531680 Examples seen. Accuracy:0.9496 Error: 0.06434 Loss:0.04204 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.68s\n", - "1532320 Examples seen. 
Accuracy:0.9524 Error: 0.04289 Loss:0.02351 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.66s\n", - "1532960 Examples seen. Accuracy:0.9546 Error: 0.12121 Loss:0.14004 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.72s\n", - "1533600 Examples seen. Accuracy:0.9558 Error: 0.07601 Loss:0.04922 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.71s\n", - "1534240 Examples seen. Accuracy:0.9565 Error: 0.06622 Loss:0.06335 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.69s\n", - "1534880 Examples seen. Accuracy:0.9551 Error: 0.18031 Loss:0.20529 Threads: 8 Forward time: 5.03s Backward time: 3.29s Step time: 3.70s\n", - "1535520 Examples seen. Accuracy:0.9551 Error: 0.10154 Loss:0.08534 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.70s\n", - "1536160 Examples seen. Accuracy:0.9545 Error: 0.11521 Loss:0.11783 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.67s\n", - "1536800 Examples seen. Accuracy:0.9544 Error: 0.15828 Loss:0.15493 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.67s\n", - "1537440 Examples seen. Accuracy:0.9545 Error: 0.06720 Loss:0.03915 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.73s\n", - "1538080 Examples seen. Accuracy:0.9545 Error: 0.09375 Loss:0.14285 Threads: 8 Forward time: 5.07s Backward time: 3.22s Step time: 3.68s\n", - "1538720 Examples seen. Accuracy:0.9549 Error: 0.12214 Loss:0.12520 Threads: 8 Forward time: 5.04s Backward time: 3.29s Step time: 3.69s\n", - "1539360 Examples seen. Accuracy:0.9549 Error: 0.20448 Loss:0.19033 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.68s\n", - "1540000 Examples seen. Accuracy:0.9537 Error: 0.10431 Loss:0.07206 Threads: 8 Forward time: 5.04s Backward time: 3.27s Step time: 3.69s\n", - "1540640 Examples seen. 
Accuracy:0.9541 Error: 0.07141 Loss:0.04654 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.67s\n", - "1541280 Examples seen. Accuracy:0.9545 Error: 0.07900 Loss:0.06020 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.67s\n", - "1541920 Examples seen. Accuracy:0.9537 Error: 0.17797 Loss:0.23254 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.67s\n", - "1542560 Examples seen. Accuracy:0.9538 Error: 0.08829 Loss:0.07143 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.68s\n", - "1543200 Examples seen. Accuracy:0.9539 Error: 0.12470 Loss:0.10698 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.68s\n", - "1543840 Examples seen. Accuracy:0.9535 Error: 0.11565 Loss:0.08203 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.65s\n", - "1544480 Examples seen. Accuracy:0.9529 Error: 0.07867 Loss:0.06331 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.65s\n", - "1545120 Examples seen. Accuracy:0.9533 Error: 0.11913 Loss:0.09546 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.72s\n", - "1545760 Examples seen. Accuracy:0.9531 Error: 0.12726 Loss:0.43528 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.65s\n", - "1546400 Examples seen. Accuracy:0.9528 Error: 0.05801 Loss:0.03907 Threads: 8 Forward time: 5.09s Backward time: 3.29s Step time: 3.68s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 31 Examples seen:1547024 Validation Accuracy: 0.9830 Validation Error: 0.0514 Validation Loss: 0.0551 Total time: 169.13min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 31. Working time: 2.82 hours.\n", - "1547664 Examples seen. Accuracy:0.9529 Error: 0.16628 Loss:0.18915 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.71s\n", - "1548304 Examples seen. 
Accuracy:0.9530 Error: 0.12248 Loss:0.13709 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.69s\n", - "1548944 Examples seen. Accuracy:0.9530 Error: 0.16205 Loss:0.16951 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.70s\n", - "1549584 Examples seen. Accuracy:0.9531 Error: 0.05847 Loss:0.05447 Threads: 8 Forward time: 5.10s Backward time: 3.34s Step time: 3.68s\n", - "1550224 Examples seen. Accuracy:0.9545 Error: 0.14723 Loss:0.21584 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.79s\n", - "1550864 Examples seen. Accuracy:0.9533 Error: 0.14106 Loss:0.11526 Threads: 8 Forward time: 5.07s Backward time: 3.22s Step time: 3.76s\n", - "1551504 Examples seen. Accuracy:0.9538 Error: 0.04955 Loss:0.03242 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.76s\n", - "1552144 Examples seen. Accuracy:0.9538 Error: 0.20006 Loss:0.23725 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.75s\n", - "1552784 Examples seen. Accuracy:0.9545 Error: 0.15203 Loss:0.12704 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.73s\n", - "1553424 Examples seen. Accuracy:0.9537 Error: 0.31141 Loss:0.32836 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.69s\n", - "1554064 Examples seen. Accuracy:0.9525 Error: 0.14953 Loss:0.17753 Threads: 8 Forward time: 5.24s Backward time: 3.27s Step time: 3.89s\n", - "1554704 Examples seen. Accuracy:0.9535 Error: 0.15743 Loss:0.18140 Threads: 8 Forward time: 5.13s Backward time: 3.32s Step time: 3.91s\n", - "1555344 Examples seen. Accuracy:0.9535 Error: 0.07978 Loss:0.08508 Threads: 8 Forward time: 5.13s Backward time: 3.32s Step time: 3.87s\n", - "1555984 Examples seen. Accuracy:0.9546 Error: 0.15380 Loss:0.19071 Threads: 8 Forward time: 5.08s Backward time: 3.28s Step time: 3.87s\n", - "1556624 Examples seen. 
Accuracy:0.9562 Error: 0.07796 Loss:0.04686 Threads: 8 Forward time: 5.05s Backward time: 3.26s Step time: 3.81s\n", - "1557264 Examples seen. Accuracy:0.9550 Error: 0.22887 Loss:0.26361 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.72s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1557904 Examples seen. Accuracy:0.9558 Error: 0.08477 Loss:0.05844 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.73s\n", - "1558544 Examples seen. Accuracy:0.9539 Error: 0.15311 Loss:0.13919 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.67s\n", - "1559184 Examples seen. Accuracy:0.9539 Error: 0.14976 Loss:0.20154 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.69s\n", - "1559824 Examples seen. Accuracy:0.9547 Error: 0.05121 Loss:0.02976 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.74s\n", - "1560464 Examples seen. Accuracy:0.9533 Error: 0.17158 Loss:0.19611 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.70s\n", - "1561104 Examples seen. Accuracy:0.9535 Error: 0.12366 Loss:0.11041 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.69s\n", - "1561744 Examples seen. Accuracy:0.9541 Error: 0.09472 Loss:0.16228 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.69s\n", - "1562384 Examples seen. Accuracy:0.9537 Error: 0.13272 Loss:0.12658 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "1563024 Examples seen. Accuracy:0.9532 Error: 0.13142 Loss:0.14904 Threads: 8 Forward time: 5.05s Backward time: 3.24s Step time: 3.81s\n", - "1563664 Examples seen. Accuracy:0.9525 Error: 0.16918 Loss:0.21944 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.74s\n", - "1564304 Examples seen. Accuracy:0.9522 Error: 0.04426 Loss:0.05508 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.68s\n", - "1564944 Examples seen. 
Accuracy:0.9526 Error: 0.05966 Loss:0.04083 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.69s\n", - "1565584 Examples seen. Accuracy:0.9524 Error: 0.07746 Loss:0.06974 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.73s\n", - "1566224 Examples seen. Accuracy:0.9521 Error: 0.04751 Loss:0.03113 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.71s\n", - "1566864 Examples seen. Accuracy:0.9516 Error: 0.16781 Loss:0.12833 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.69s\n", - "1567504 Examples seen. Accuracy:0.9524 Error: 0.03014 Loss:0.01646 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.71s\n", - "1568144 Examples seen. Accuracy:0.9523 Error: 0.10557 Loss:0.11976 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.80s\n", - "1568784 Examples seen. Accuracy:0.9531 Error: 0.06069 Loss:0.03711 Threads: 8 Forward time: 5.03s Backward time: 3.29s Step time: 3.81s\n", - "1569424 Examples seen. Accuracy:0.9529 Error: 0.10758 Loss:0.08671 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.73s\n", - "1570064 Examples seen. Accuracy:0.9529 Error: 0.10368 Loss:0.08403 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.68s\n", - "1570704 Examples seen. Accuracy:0.9531 Error: 0.15546 Loss:0.15937 Threads: 8 Forward time: 5.01s Backward time: 3.32s Step time: 3.72s\n", - "1571344 Examples seen. Accuracy:0.9538 Error: 0.12103 Loss:0.12506 Threads: 8 Forward time: 5.02s Backward time: 3.29s Step time: 3.70s\n", - "1571984 Examples seen. Accuracy:0.9540 Error: 0.13243 Loss:0.12731 Threads: 8 Forward time: 5.16s Backward time: 3.35s Step time: 3.76s\n", - "1572624 Examples seen. Accuracy:0.9541 Error: 0.10642 Loss:0.09237 Threads: 8 Forward time: 5.05s Backward time: 3.28s Step time: 3.79s\n", - "1573264 Examples seen. 
Accuracy:0.9533 Error: 0.08424 Loss:0.09995 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.77s\n", - "1573904 Examples seen. Accuracy:0.9542 Error: 0.12324 Loss:0.13014 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.69s\n", - "1574544 Examples seen. Accuracy:0.9547 Error: 0.11231 Loss:0.11648 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.71s\n", - "1575184 Examples seen. Accuracy:0.9549 Error: 0.18973 Loss:0.20791 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.77s\n", - "1575824 Examples seen. Accuracy:0.9553 Error: 0.09004 Loss:0.07261 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.74s\n", - "1576464 Examples seen. Accuracy:0.9550 Error: 0.14157 Loss:0.16398 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.69s\n", - "1577104 Examples seen. Accuracy:0.9543 Error: 0.19825 Loss:0.32202 Threads: 8 Forward time: 5.05s Backward time: 3.20s Step time: 3.82s\n", - "1577744 Examples seen. Accuracy:0.9537 Error: 0.09035 Loss:0.06121 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.71s\n", - "1578384 Examples seen. Accuracy:0.9526 Error: 0.13430 Loss:0.12229 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.70s\n", - "1579024 Examples seen. Accuracy:0.9529 Error: 0.10155 Loss:0.07976 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.70s\n", - "1579664 Examples seen. Accuracy:0.9531 Error: 0.05964 Loss:0.03903 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.70s\n", - "1580304 Examples seen. Accuracy:0.9510 Error: 0.17312 Loss:0.14742 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.68s\n", - "1580944 Examples seen. Accuracy:0.9503 Error: 0.04752 Loss:0.03827 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.69s\n", - "1581584 Examples seen. 
Accuracy:0.9517 Error: 0.08759 Loss:0.10440 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.72s\n", - "1582224 Examples seen. Accuracy:0.9515 Error: 0.06674 Loss:0.04134 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.71s\n", - "1582864 Examples seen. Accuracy:0.9513 Error: 0.14279 Loss:0.17607 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.68s\n", - "1583504 Examples seen. Accuracy:0.9532 Error: 0.07118 Loss:0.07629 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.69s\n", - "1584144 Examples seen. Accuracy:0.9526 Error: 0.12284 Loss:0.13673 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.71s\n", - "1584784 Examples seen. Accuracy:0.9528 Error: 0.07510 Loss:0.09906 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.69s\n", - "1585424 Examples seen. Accuracy:0.9531 Error: 0.11727 Loss:0.09386 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.78s\n", - "1586064 Examples seen. Accuracy:0.9526 Error: 0.14606 Loss:0.19256 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.66s\n", - "1586704 Examples seen. Accuracy:0.9529 Error: 0.13011 Loss:0.16303 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.73s\n", - "1587344 Examples seen. Accuracy:0.9533 Error: 0.08130 Loss:0.16172 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.69s\n", - "1587984 Examples seen. Accuracy:0.9549 Error: 0.07929 Loss:0.05760 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.68s\n", - "1588624 Examples seen. Accuracy:0.9532 Error: 0.14834 Loss:0.13651 Threads: 8 Forward time: 4.92s Backward time: 3.20s Step time: 3.63s\n", - "1589264 Examples seen. Accuracy:0.9528 Error: 0.21870 Loss:0.24153 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.65s\n", - "1589904 Examples seen. 
Accuracy:0.9533 Error: 0.09359 Loss:0.07661 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.68s\n", - "1590544 Examples seen. Accuracy:0.9542 Error: 0.06378 Loss:0.04138 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.66s\n", - "1591184 Examples seen. Accuracy:0.9545 Error: 0.12400 Loss:0.16289 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.70s\n", - "1591824 Examples seen. Accuracy:0.9557 Error: 0.06807 Loss:0.05759 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.66s\n", - "1592464 Examples seen. Accuracy:0.9552 Error: 0.10427 Loss:0.17617 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.65s\n", - "1593104 Examples seen. Accuracy:0.9556 Error: 0.15271 Loss:0.13842 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.67s\n", - "1593744 Examples seen. Accuracy:0.9566 Error: 0.18541 Loss:0.22167 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.68s\n", - "1594384 Examples seen. Accuracy:0.9577 Error: 0.06719 Loss:0.06279 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.65s\n", - "1595024 Examples seen. Accuracy:0.9565 Error: 0.07624 Loss:0.05033 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.65s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1595664 Examples seen. Accuracy:0.9558 Error: 0.15134 Loss:0.14120 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.65s\n", - "1596304 Examples seen. 
Accuracy:0.9557 Error: 0.14495 Loss:0.21276 Threads: 8 Forward time: 5.05s Backward time: 3.26s Step time: 3.67s\n", - "Starting Validation.\n", - "Epochs: 32 Examples seen:1596928 Validation Accuracy: 0.9819 Validation Error: 0.0508 Validation Loss: 0.0543 Total time: 174.44min\n", - "Layer 0 Max Output: 1.266 Min Output: -2.000 TNNetInput 128,128,3 Times: 0.00s 0.00s\n", - "Layer 1 Neurons: 64 Max Weight: 0.439 Min Weight: -0.343 Max Output: 5.860 Min Output: -5.674 TNNetConvolutionLinear 66,66,64 Times: 8.59s 0.40s Parent:0\n", - "Layer 2 Max Output: 5.860 Min Output: -3.336 TNNetMaxPool 33,33,64 Times: 3.64s 0.06s Parent:1\n", - "Layer 3 Neurons: 1 Max Weight: 0.625 Min Weight: 0.249 Max Output: 8.984 Min Output: -5.738 TNNetMovingStdNormalization 33,33,64 Times: 0.28s 0.00s Parent:2\n", - "Layer 4 Neurons: 64 Max Weight: 0.383 Min Weight: -0.216 Max Output: 11.040 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.89s 0.51s Parent:3\n", - "Layer 5 Neurons: 64 Max Weight: 0.369 Min Weight: -0.339 Max Output: 10.826 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.91s 0.18s Parent:4\n", - "Layer 6 Max Output: 10.826 Min Output: 0.000 TNNetMaxPool 17,17,64 Times: 0.49s 0.02s Parent:5\n", - "Layer 7 Neurons: 64 Max Weight: 0.410 Min Weight: -0.262 Max Output: 8.117 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.44s 0.09s Parent:6\n", - "Layer 8 Neurons: 64 Max Weight: 0.266 Min Weight: -0.242 Max Output: 6.709 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.49s 0.09s Parent:7\n", - "Layer 9 Neurons: 64 Max Weight: 0.249 Min Weight: -0.203 Max Output: 10.613 Min Output: 0.000 TNNetConvolutionReLU 9,9,64 Times: 0.45s 0.02s Parent:8\n", - "Layer 10 Max Output: 10.613 Min Output: 0.000 TNNetDropout 9,9,64 Times: 0.00s 0.00s Parent:9\n", - "Layer 11 Max Output: 10.613 Min Output: 0.000 TNNetMaxPool 5,5,64 Times: 0.05s 0.00s Parent:10\n", - "Layer 12 Neurons: 39 Max Weight: 0.393 Min Weight: -0.392 Max Output: 30.513 Min Output: -13.762 
TNNetFullConnectLinear 39,1,1 Times: 0.02s 0.00s Parent:11\n", - "Layer 13 Max Output: 1.000 Min Output: 0.000 TNNetSoftMax 39,1,1 Times: 0.00s 0.00s Parent:12\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 32. Working time: 2.91 hours.\n", - "1597568 Examples seen. Accuracy:0.9541 Error: 0.16013 Loss:0.24270 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.77s\n", - "1598208 Examples seen. Accuracy:0.9552 Error: 0.10279 Loss:0.11600 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.69s\n", - "1598848 Examples seen. Accuracy:0.9563 Error: 0.16388 Loss:0.23268 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.68s\n", - "1599488 Examples seen. Accuracy:0.9570 Error: 0.08905 Loss:0.06510 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.73s\n", - "1600128 Examples seen. Accuracy:0.9554 Error: 0.14965 Loss:0.31265 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.74s\n", - "1600768 Examples seen. Accuracy:0.9560 Error: 0.14754 Loss:0.16965 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.73s\n", - "1601408 Examples seen. Accuracy:0.9563 Error: 0.08028 Loss:0.04986 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.64s\n", - "1602048 Examples seen. Accuracy:0.9564 Error: 0.06702 Loss:0.04836 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.64s\n", - "1602688 Examples seen. Accuracy:0.9569 Error: 0.04664 Loss:0.08026 Threads: 8 Forward time: 5.06s Backward time: 3.24s Step time: 3.74s\n", - "1603328 Examples seen. Accuracy:0.9573 Error: 0.08061 Loss:0.09195 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.64s\n", - "1603968 Examples seen. Accuracy:0.9571 Error: 0.12977 Loss:0.13318 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.65s\n", - "1604608 Examples seen. 
Accuracy:0.9557 Error: 0.13459 Loss:0.19731 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.62s\n", - "1605248 Examples seen. Accuracy:0.9555 Error: 0.09090 Loss:0.15428 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.61s\n", - "1605888 Examples seen. Accuracy:0.9548 Error: 0.11312 Loss:0.09419 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.61s\n", - "1606528 Examples seen. Accuracy:0.9543 Error: 0.17129 Loss:0.22943 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.64s\n", - "1607168 Examples seen. Accuracy:0.9538 Error: 0.09884 Loss:0.14281 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.65s\n", - "1607808 Examples seen. Accuracy:0.9542 Error: 0.15143 Loss:0.13002 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.64s\n", - "1608448 Examples seen. Accuracy:0.9546 Error: 0.05632 Loss:0.05603 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.65s\n", - "1609088 Examples seen. Accuracy:0.9545 Error: 0.08208 Loss:0.08032 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.69s\n", - "1609728 Examples seen. Accuracy:0.9528 Error: 0.12700 Loss:0.15873 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.65s\n", - "1610368 Examples seen. Accuracy:0.9531 Error: 0.17311 Loss:0.16741 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.67s\n", - "1611008 Examples seen. Accuracy:0.9535 Error: 0.04387 Loss:0.02371 Threads: 8 Forward time: 5.04s Backward time: 3.31s Step time: 3.69s\n", - "1611648 Examples seen. Accuracy:0.9533 Error: 0.12807 Loss:0.15235 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.65s\n", - "1612288 Examples seen. Accuracy:0.9537 Error: 0.12386 Loss:0.11456 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.63s\n", - "1612928 Examples seen. 
Accuracy:0.9538 Error: 0.19187 Loss:0.16314 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.64s\n", - "1613568 Examples seen. Accuracy:0.9507 Error: 0.12188 Loss:0.13847 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.64s\n", - "1614208 Examples seen. Accuracy:0.9507 Error: 0.22041 Loss:0.34930 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.63s\n", - "1614848 Examples seen. Accuracy:0.9507 Error: 0.09226 Loss:0.06922 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.63s\n", - "1615488 Examples seen. Accuracy:0.9505 Error: 0.22833 Loss:0.21934 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.64s\n", - "1616128 Examples seen. Accuracy:0.9495 Error: 0.20253 Loss:0.23931 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.62s\n", - "1616768 Examples seen. Accuracy:0.9503 Error: 0.06496 Loss:0.05083 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.62s\n", - "1617408 Examples seen. Accuracy:0.9499 Error: 0.08523 Loss:0.06422 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.63s\n", - "1618048 Examples seen. Accuracy:0.9500 Error: 0.09634 Loss:0.08651 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.65s\n", - "1618688 Examples seen. Accuracy:0.9513 Error: 0.04952 Loss:0.03577 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.65s\n", - "1619328 Examples seen. Accuracy:0.9511 Error: 0.12932 Loss:0.10125 Threads: 8 Forward time: 4.93s Backward time: 3.20s Step time: 3.62s\n", - "1619968 Examples seen. Accuracy:0.9505 Error: 0.22770 Loss:0.25212 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.61s\n", - "1620608 Examples seen. Accuracy:0.9510 Error: 0.12027 Loss:0.08784 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.70s\n", - "1621248 Examples seen. 
Accuracy:0.9511 Error: 0.08571 Loss:0.06502 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.63s\n", - "1621888 Examples seen. Accuracy:0.9506 Error: 0.15459 Loss:0.14452 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.67s\n", - "1622528 Examples seen. Accuracy:0.9524 Error: 0.13068 Loss:0.13016 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.64s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1623168 Examples seen. Accuracy:0.9521 Error: 0.13584 Loss:0.11718 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.63s\n", - "1623808 Examples seen. Accuracy:0.9516 Error: 0.08862 Loss:0.06971 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.62s\n", - "1624448 Examples seen. Accuracy:0.9514 Error: 0.18071 Loss:0.20353 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.62s\n", - "1625088 Examples seen. Accuracy:0.9516 Error: 0.13334 Loss:0.11217 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.62s\n", - "1625728 Examples seen. Accuracy:0.9516 Error: 0.21728 Loss:0.29211 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.65s\n", - "1626368 Examples seen. Accuracy:0.9522 Error: 0.15522 Loss:0.24822 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.62s\n", - "1627008 Examples seen. Accuracy:0.9521 Error: 0.09664 Loss:0.10891 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.63s\n", - "1627648 Examples seen. Accuracy:0.9521 Error: 0.13974 Loss:0.20051 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.63s\n", - "1628288 Examples seen. Accuracy:0.9513 Error: 0.11770 Loss:0.17074 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.72s\n", - "1628928 Examples seen. Accuracy:0.9514 Error: 0.09608 Loss:0.07821 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.64s\n", - "1629568 Examples seen. 
Accuracy:0.9517 Error: 0.10634 Loss:0.07820 Threads: 8 Forward time: 5.08s Backward time: 3.26s Step time: 3.65s\n", - "1630208 Examples seen. Accuracy:0.9504 Error: 0.09921 Loss:0.07959 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.63s\n", - "1630848 Examples seen. Accuracy:0.9515 Error: 0.07357 Loss:0.07447 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.65s\n", - "1631488 Examples seen. Accuracy:0.9523 Error: 0.07915 Loss:0.10349 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.68s\n", - "1632128 Examples seen. Accuracy:0.9511 Error: 0.16141 Loss:0.19480 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.63s\n", - "1632768 Examples seen. Accuracy:0.9507 Error: 0.18076 Loss:0.26829 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.60s\n", - "1633408 Examples seen. Accuracy:0.9512 Error: 0.11791 Loss:0.12039 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.73s\n", - "1634048 Examples seen. Accuracy:0.9510 Error: 0.16509 Loss:0.13551 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.67s\n", - "1634688 Examples seen. Accuracy:0.9506 Error: 0.09375 Loss:0.15359 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.67s\n", - "1635328 Examples seen. Accuracy:0.9502 Error: 0.14889 Loss:0.28760 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.65s\n", - "1635968 Examples seen. Accuracy:0.9500 Error: 0.16107 Loss:0.17486 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.68s\n", - "1636608 Examples seen. Accuracy:0.9508 Error: 0.18205 Loss:0.29972 Threads: 8 Forward time: 5.10s Backward time: 3.31s Step time: 3.75s\n", - "1637248 Examples seen. Accuracy:0.9500 Error: 0.08575 Loss:0.06189 Threads: 8 Forward time: 5.07s Backward time: 3.31s Step time: 4.33s\n", - "1637888 Examples seen. 
Accuracy:0.9498 Error: 0.12046 Loss:0.14560 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.67s\n", - "1638528 Examples seen. Accuracy:0.9506 Error: 0.15807 Loss:0.18995 Threads: 8 Forward time: 5.04s Backward time: 3.22s Step time: 3.66s\n", - "1639168 Examples seen. Accuracy:0.9520 Error: 0.13134 Loss:0.10707 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.75s\n", - "1639808 Examples seen. Accuracy:0.9526 Error: 0.15300 Loss:0.12814 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.67s\n", - "1640448 Examples seen. Accuracy:0.9543 Error: 0.07585 Loss:0.09203 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.70s\n", - "1641088 Examples seen. Accuracy:0.9548 Error: 0.11588 Loss:0.09295 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.69s\n", - "1641728 Examples seen. Accuracy:0.9560 Error: 0.10694 Loss:0.14617 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.69s\n", - "1642368 Examples seen. Accuracy:0.9562 Error: 0.22664 Loss:0.23409 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.72s\n", - "1643008 Examples seen. Accuracy:0.9573 Error: 0.08852 Loss:0.06584 Threads: 8 Forward time: 5.12s Backward time: 3.28s Step time: 3.73s\n", - "1643648 Examples seen. Accuracy:0.9572 Error: 0.08380 Loss:0.05452 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.68s\n", - "1644288 Examples seen. Accuracy:0.9570 Error: 0.08202 Loss:0.08988 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.68s\n", - "1644928 Examples seen. Accuracy:0.9553 Error: 0.13404 Loss:0.13777 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.65s\n", - "1645568 Examples seen. Accuracy:0.9556 Error: 0.20865 Loss:0.26621 Threads: 8 Forward time: 5.15s Backward time: 3.28s Step time: 3.69s\n", - "1646208 Examples seen. 
Accuracy:0.9565 Error: 0.10476 Loss:0.06641 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.66s\n", - "Starting Validation.\n", - "Epochs: 33 Examples seen:1646832 Validation Accuracy: 0.9815 Validation Error: 0.0504 Validation Loss: 0.0528 Total time: 179.68min\n", - "Epoch time: 4.8 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 33. Working time: 2.99 hours.\n", - "1647472 Examples seen. Accuracy:0.9563 Error: 0.09850 Loss:0.08254 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.72s\n", - "1648112 Examples seen. Accuracy:0.9555 Error: 0.12796 Loss:0.15990 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.69s\n", - "1648752 Examples seen. Accuracy:0.9553 Error: 0.17558 Loss:0.18911 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.66s\n", - "1649392 Examples seen. Accuracy:0.9555 Error: 0.09734 Loss:0.06831 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.64s\n", - "1650032 Examples seen. Accuracy:0.9540 Error: 0.13836 Loss:0.15612 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.66s\n", - "1650672 Examples seen. Accuracy:0.9534 Error: 0.14649 Loss:0.14747 Threads: 8 Forward time: 4.92s Backward time: 3.22s Step time: 3.66s\n", - "1651312 Examples seen. Accuracy:0.9535 Error: 0.11542 Loss:0.08050 Threads: 8 Forward time: 4.94s Backward time: 3.19s Step time: 3.63s\n", - "1651952 Examples seen. Accuracy:0.9539 Error: 0.09480 Loss:0.09954 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.64s\n", - "1652592 Examples seen. Accuracy:0.9556 Error: 0.09973 Loss:0.07211 Threads: 8 Forward time: 5.08s Backward time: 3.25s Step time: 3.67s\n", - "1653232 Examples seen. Accuracy:0.9568 Error: 0.06803 Loss:0.10311 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "1653872 Examples seen. Accuracy:0.9560 Error: 0.19173 Loss:0.18893 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.63s\n", - "1654512 Examples seen. 
Accuracy:0.9567 Error: 0.10031 Loss:0.08963 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.66s\n", - "1655152 Examples seen. Accuracy:0.9564 Error: 0.09679 Loss:0.07225 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.65s\n", - "1655792 Examples seen. Accuracy:0.9570 Error: 0.10773 Loss:0.07797 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.66s\n", - "1656432 Examples seen. Accuracy:0.9566 Error: 0.04826 Loss:0.02962 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.66s\n", - "1657072 Examples seen. Accuracy:0.9571 Error: 0.05885 Loss:0.03571 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.66s\n", - "1657712 Examples seen. Accuracy:0.9570 Error: 0.14733 Loss:0.18574 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.78s\n", - "1658352 Examples seen. Accuracy:0.9572 Error: 0.08525 Loss:0.05966 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.69s\n", - "1658992 Examples seen. Accuracy:0.9569 Error: 0.10925 Loss:0.14533 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.69s\n", - "1659632 Examples seen. Accuracy:0.9568 Error: 0.16850 Loss:0.19956 Threads: 8 Forward time: 5.09s Backward time: 3.29s Step time: 3.72s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1660272 Examples seen. Accuracy:0.9551 Error: 0.19491 Loss:0.17583 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.69s\n", - "1660912 Examples seen. Accuracy:0.9551 Error: 0.13619 Loss:0.10465 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.69s\n", - "1661552 Examples seen. Accuracy:0.9559 Error: 0.05943 Loss:0.06035 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.65s\n", - "1662192 Examples seen. Accuracy:0.9562 Error: 0.09879 Loss:0.07463 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.66s\n", - "1662832 Examples seen. 
Accuracy:0.9552 Error: 0.05178 Loss:0.02908 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.66s\n", - "1663472 Examples seen. Accuracy:0.9548 Error: 0.16065 Loss:0.22986 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.66s\n", - "1664112 Examples seen. Accuracy:0.9549 Error: 0.07075 Loss:0.05845 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.64s\n", - "1664752 Examples seen. Accuracy:0.9549 Error: 0.08263 Loss:0.09076 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.65s\n", - "1665392 Examples seen. Accuracy:0.9549 Error: 0.11192 Loss:0.14379 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.66s\n", - "1666032 Examples seen. Accuracy:0.9544 Error: 0.10133 Loss:0.10255 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.65s\n", - "1666672 Examples seen. Accuracy:0.9541 Error: 0.16394 Loss:0.15335 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.65s\n", - "1667312 Examples seen. Accuracy:0.9531 Error: 0.18874 Loss:0.20483 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.64s\n", - "1667952 Examples seen. Accuracy:0.9545 Error: 0.10851 Loss:0.10826 Threads: 8 Forward time: 5.13s Backward time: 3.25s Step time: 3.66s\n", - "1668592 Examples seen. Accuracy:0.9539 Error: 0.20188 Loss:0.30046 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.67s\n", - "1669232 Examples seen. Accuracy:0.9534 Error: 0.09418 Loss:0.12630 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.72s\n", - "1669872 Examples seen. Accuracy:0.9526 Error: 0.10075 Loss:0.06494 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.67s\n", - "1670512 Examples seen. Accuracy:0.9534 Error: 0.10998 Loss:0.13485 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.69s\n", - "1671152 Examples seen. 
Accuracy:0.9526 Error: 0.14655 Loss:0.29125 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.69s\n", - "1671792 Examples seen. Accuracy:0.9514 Error: 0.08915 Loss:0.08607 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.70s\n", - "1672432 Examples seen. Accuracy:0.9512 Error: 0.14007 Loss:0.10383 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.69s\n", - "1673072 Examples seen. Accuracy:0.9511 Error: 0.11376 Loss:0.07999 Threads: 8 Forward time: 5.11s Backward time: 3.26s Step time: 3.72s\n", - "1673712 Examples seen. Accuracy:0.9517 Error: 0.23166 Loss:0.29188 Threads: 8 Forward time: 5.11s Backward time: 3.27s Step time: 3.75s\n", - "1674352 Examples seen. Accuracy:0.9515 Error: 0.05771 Loss:0.04650 Threads: 8 Forward time: 5.11s Backward time: 3.26s Step time: 3.78s\n", - "1674992 Examples seen. Accuracy:0.9521 Error: 0.12818 Loss:0.12640 Threads: 8 Forward time: 5.13s Backward time: 3.24s Step time: 3.72s\n", - "1675632 Examples seen. Accuracy:0.9524 Error: 0.04559 Loss:0.03854 Threads: 8 Forward time: 4.86s Backward time: 3.17s Step time: 3.74s\n", - "1676272 Examples seen. Accuracy:0.9520 Error: 0.14036 Loss:0.12077 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.67s\n", - "1676912 Examples seen. Accuracy:0.9529 Error: 0.08069 Loss:0.09086 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.66s\n", - "1677552 Examples seen. Accuracy:0.9521 Error: 0.13960 Loss:0.12760 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.65s\n", - "1678192 Examples seen. Accuracy:0.9529 Error: 0.14290 Loss:0.13268 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.66s\n", - "1678832 Examples seen. Accuracy:0.9529 Error: 0.16977 Loss:0.17442 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.70s\n", - "1679472 Examples seen. 
Accuracy:0.9542 Error: 0.13151 Loss:0.15229 Threads: 8 Forward time: 5.03s Backward time: 3.21s Step time: 3.69s\n", - "1680112 Examples seen. Accuracy:0.9535 Error: 0.13522 Loss:0.12510 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.63s\n", - "1680752 Examples seen. Accuracy:0.9547 Error: 0.18679 Loss:0.21029 Threads: 8 Forward time: 4.97s Backward time: 3.17s Step time: 3.65s\n", - "1681392 Examples seen. Accuracy:0.9538 Error: 0.08448 Loss:0.05118 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.67s\n", - "1682032 Examples seen. Accuracy:0.9540 Error: 0.11112 Loss:0.10243 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.66s\n", - "1682672 Examples seen. Accuracy:0.9533 Error: 0.18357 Loss:0.26247 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "1683312 Examples seen. Accuracy:0.9527 Error: 0.15348 Loss:0.19900 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.64s\n", - "1683952 Examples seen. Accuracy:0.9534 Error: 0.07557 Loss:0.05268 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.65s\n", - "1684592 Examples seen. Accuracy:0.9531 Error: 0.17874 Loss:0.15898 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.69s\n", - "1685232 Examples seen. Accuracy:0.9541 Error: 0.09773 Loss:0.10179 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.65s\n", - "1685872 Examples seen. Accuracy:0.9546 Error: 0.10309 Loss:0.08649 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.63s\n", - "1686512 Examples seen. Accuracy:0.9551 Error: 0.11693 Loss:0.10746 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.66s\n", - "1687152 Examples seen. Accuracy:0.9557 Error: 0.09098 Loss:0.07947 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.82s\n", - "1687792 Examples seen. 
Accuracy:0.9546 Error: 0.09234 Loss:0.07160 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.68s\n", - "1688432 Examples seen. Accuracy:0.9543 Error: 0.06248 Loss:0.03925 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.64s\n", - "1689072 Examples seen. Accuracy:0.9535 Error: 0.17118 Loss:0.18170 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.64s\n", - "1689712 Examples seen. Accuracy:0.9540 Error: 0.14168 Loss:0.11793 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.67s\n", - "1690352 Examples seen. Accuracy:0.9536 Error: 0.09648 Loss:0.09039 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.67s\n", - "1690992 Examples seen. Accuracy:0.9540 Error: 0.11581 Loss:0.12648 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.64s\n", - "1691632 Examples seen. Accuracy:0.9533 Error: 0.15335 Loss:0.11384 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.70s\n", - "1692272 Examples seen. Accuracy:0.9526 Error: 0.20354 Loss:0.17184 Threads: 8 Forward time: 5.07s Backward time: 3.31s Step time: 3.69s\n", - "1692912 Examples seen. Accuracy:0.9530 Error: 0.11656 Loss:0.08986 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.68s\n", - "1693552 Examples seen. Accuracy:0.9541 Error: 0.09202 Loss:0.10550 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.64s\n", - "1694192 Examples seen. Accuracy:0.9552 Error: 0.05833 Loss:0.03385 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.66s\n", - "1694832 Examples seen. Accuracy:0.9553 Error: 0.03285 Loss:0.02443 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.66s\n", - "1695472 Examples seen. Accuracy:0.9552 Error: 0.09947 Loss:0.06305 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.72s\n", - "1696112 Examples seen. 
Accuracy:0.9555 Error: 0.17509 Loss:0.30145 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.65s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 34 Examples seen:1696736 Validation Accuracy: 0.9833 Validation Error: 0.0488 Validation Loss: 0.0500 Total time: 184.93min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 34. Working time: 3.08 hours.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1697376 Examples seen. Accuracy:0.9564 Error: 0.12370 Loss:0.11274 Threads: 8 Forward time: 5.03s Backward time: 3.28s Step time: 4.00s\n", - "1698016 Examples seen. Accuracy:0.9581 Error: 0.06149 Loss:0.03997 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.75s\n", - "1698656 Examples seen. Accuracy:0.9563 Error: 0.11511 Loss:0.12596 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.72s\n", - "1699296 Examples seen. Accuracy:0.9575 Error: 0.04369 Loss:0.03368 Threads: 8 Forward time: 5.21s Backward time: 3.39s Step time: 3.78s\n", - "1699936 Examples seen. Accuracy:0.9581 Error: 0.14284 Loss:0.10989 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.73s\n", - "1700576 Examples seen. Accuracy:0.9573 Error: 0.12422 Loss:0.10745 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.69s\n", - "1701216 Examples seen. Accuracy:0.9565 Error: 0.07989 Loss:0.06614 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.68s\n", - "1701856 Examples seen. Accuracy:0.9572 Error: 0.03664 Loss:0.04892 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.69s\n", - "1702496 Examples seen. Accuracy:0.9562 Error: 0.08763 Loss:0.08473 Threads: 8 Forward time: 5.05s Backward time: 3.24s Step time: 3.71s\n", - "1703136 Examples seen. Accuracy:0.9564 Error: 0.09152 Loss:0.08435 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.72s\n", - "1703776 Examples seen. 
Accuracy:0.9564 Error: 0.15905 Loss:0.19017 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.61s\n", - "1704416 Examples seen. Accuracy:0.9566 Error: 0.02629 Loss:0.01696 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.62s\n", - "1705056 Examples seen. Accuracy:0.9575 Error: 0.10146 Loss:0.06423 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.63s\n", - "1705696 Examples seen. Accuracy:0.9574 Error: 0.11619 Loss:0.15490 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.64s\n", - "1706336 Examples seen. Accuracy:0.9579 Error: 0.09154 Loss:0.06040 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.66s\n", - "1706976 Examples seen. Accuracy:0.9571 Error: 0.09999 Loss:0.09077 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.65s\n", - "1707616 Examples seen. Accuracy:0.9577 Error: 0.10589 Loss:0.12010 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.67s\n", - "1708256 Examples seen. Accuracy:0.9556 Error: 0.09545 Loss:0.12080 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.65s\n", - "1708896 Examples seen. Accuracy:0.9525 Error: 0.18734 Loss:0.30184 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.66s\n", - "1709536 Examples seen. Accuracy:0.9527 Error: 0.09554 Loss:0.07707 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.65s\n", - "1710176 Examples seen. Accuracy:0.9503 Error: 0.17724 Loss:0.19445 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.67s\n", - "1710816 Examples seen. Accuracy:0.9512 Error: 0.05224 Loss:0.02879 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.67s\n", - "1711456 Examples seen. Accuracy:0.9518 Error: 0.16336 Loss:0.15816 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.65s\n", - "1712096 Examples seen. 
Accuracy:0.9521 Error: 0.12217 Loss:0.11052 Threads: 8 Forward time: 5.08s Backward time: 3.27s Step time: 3.69s\n", - "1712736 Examples seen. Accuracy:0.9515 Error: 0.16639 Loss:0.16000 Threads: 8 Forward time: 5.14s Backward time: 3.32s Step time: 3.77s\n", - "1713376 Examples seen. Accuracy:0.9515 Error: 0.06803 Loss:0.05030 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.68s\n", - "1714016 Examples seen. Accuracy:0.9513 Error: 0.11856 Loss:0.09933 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.67s\n", - "1714656 Examples seen. Accuracy:0.9522 Error: 0.07996 Loss:0.06198 Threads: 8 Forward time: 5.10s Backward time: 3.29s Step time: 3.69s\n", - "1715296 Examples seen. Accuracy:0.9532 Error: 0.14122 Loss:0.14930 Threads: 8 Forward time: 4.92s Backward time: 3.22s Step time: 3.70s\n", - "1715936 Examples seen. Accuracy:0.9548 Error: 0.13314 Loss:0.10416 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.65s\n", - "1716576 Examples seen. Accuracy:0.9564 Error: 0.07180 Loss:0.04883 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.82s\n", - "1717216 Examples seen. Accuracy:0.9568 Error: 0.14105 Loss:0.12290 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.79s\n", - "1717856 Examples seen. Accuracy:0.9571 Error: 0.12672 Loss:0.13729 Threads: 8 Forward time: 5.01s Backward time: 3.27s Step time: 3.73s\n", - "1718496 Examples seen. Accuracy:0.9586 Error: 0.08768 Loss:0.06487 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.74s\n", - "1719136 Examples seen. Accuracy:0.9579 Error: 0.14699 Loss:0.17922 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.68s\n", - "1719776 Examples seen. Accuracy:0.9570 Error: 0.16219 Loss:0.16167 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.68s\n", - "1720416 Examples seen. 
Accuracy:0.9570 Error: 0.15683 Loss:0.13489 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.72s\n", - "1721056 Examples seen. Accuracy:0.9565 Error: 0.21208 Loss:0.26963 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.70s\n", - "1721696 Examples seen. Accuracy:0.9550 Error: 0.15612 Loss:0.15786 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.71s\n", - "1722336 Examples seen. Accuracy:0.9545 Error: 0.12294 Loss:0.12137 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.72s\n", - "1722976 Examples seen. Accuracy:0.9531 Error: 0.17181 Loss:0.18947 Threads: 8 Forward time: 5.09s Backward time: 3.31s Step time: 3.68s\n", - "1723616 Examples seen. Accuracy:0.9524 Error: 0.10175 Loss:0.08284 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.67s\n", - "1724256 Examples seen. Accuracy:0.9523 Error: 0.07619 Loss:0.09009 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.67s\n", - "1724896 Examples seen. Accuracy:0.9531 Error: 0.12681 Loss:0.10745 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.68s\n", - "1725536 Examples seen. Accuracy:0.9534 Error: 0.12187 Loss:0.10131 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.66s\n", - "1726176 Examples seen. Accuracy:0.9545 Error: 0.13699 Loss:0.09375 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.72s\n", - "1726816 Examples seen. Accuracy:0.9556 Error: 0.10385 Loss:0.07738 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.69s\n", - "1727456 Examples seen. Accuracy:0.9557 Error: 0.14743 Loss:0.20727 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.70s\n", - "1728096 Examples seen. Accuracy:0.9574 Error: 0.06703 Loss:0.05142 Threads: 8 Forward time: 5.05s Backward time: 3.29s Step time: 3.70s\n", - "1728736 Examples seen. 
Accuracy:0.9589 Error: 0.08880 Loss:0.08136 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.73s\n", - "1729376 Examples seen. Accuracy:0.9601 Error: 0.06884 Loss:0.04845 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.67s\n", - "1730016 Examples seen. Accuracy:0.9594 Error: 0.09164 Loss:0.05929 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.70s\n", - "1730656 Examples seen. Accuracy:0.9588 Error: 0.12068 Loss:0.12526 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.70s\n", - "1731296 Examples seen. Accuracy:0.9577 Error: 0.13381 Loss:0.11213 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.75s\n", - "1731936 Examples seen. Accuracy:0.9586 Error: 0.09838 Loss:0.09358 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.72s\n", - "1732576 Examples seen. Accuracy:0.9588 Error: 0.09802 Loss:0.10131 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.73s\n", - "1733216 Examples seen. Accuracy:0.9582 Error: 0.17533 Loss:0.31308 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.72s\n", - "1733856 Examples seen. Accuracy:0.9588 Error: 0.09843 Loss:0.08373 Threads: 8 Forward time: 5.07s Backward time: 3.28s Step time: 3.73s\n", - "1734496 Examples seen. Accuracy:0.9592 Error: 0.09206 Loss:0.06744 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.70s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1735136 Examples seen. Accuracy:0.9586 Error: 0.21750 Loss:0.23582 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.73s\n", - "1735776 Examples seen. Accuracy:0.9582 Error: 0.14068 Loss:0.17215 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.74s\n", - "1736416 Examples seen. Accuracy:0.9586 Error: 0.10147 Loss:0.11270 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.69s\n", - "1737056 Examples seen. 
Accuracy:0.9575 Error: 0.09361 Loss:0.06872 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.69s\n", - "1737696 Examples seen. Accuracy:0.9566 Error: 0.18389 Loss:0.18223 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.69s\n", - "1738336 Examples seen. Accuracy:0.9567 Error: 0.10437 Loss:0.08243 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.69s\n", - "1738976 Examples seen. Accuracy:0.9567 Error: 0.13030 Loss:0.11569 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.67s\n", - "1739616 Examples seen. Accuracy:0.9581 Error: 0.08965 Loss:0.10235 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "1740256 Examples seen. Accuracy:0.9588 Error: 0.11732 Loss:0.07823 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.67s\n", - "1740896 Examples seen. Accuracy:0.9586 Error: 0.19713 Loss:0.20001 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.66s\n", - "1741536 Examples seen. Accuracy:0.9599 Error: 0.08251 Loss:0.05678 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.65s\n", - "1742176 Examples seen. Accuracy:0.9589 Error: 0.08973 Loss:0.06632 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.68s\n", - "1742816 Examples seen. Accuracy:0.9584 Error: 0.06525 Loss:0.04217 Threads: 8 Forward time: 5.04s Backward time: 3.28s Step time: 3.72s\n", - "1743456 Examples seen. Accuracy:0.9584 Error: 0.10065 Loss:0.07313 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.64s\n", - "1744096 Examples seen. Accuracy:0.9562 Error: 0.14727 Loss:0.16625 Threads: 8 Forward time: 4.92s Backward time: 3.18s Step time: 3.64s\n", - "1744736 Examples seen. Accuracy:0.9566 Error: 0.11290 Loss:0.06894 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.68s\n", - "1745376 Examples seen. 
Accuracy:0.9551 Error: 0.19128 Loss:0.26201 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.66s\n", - "1746016 Examples seen. Accuracy:0.9553 Error: 0.12488 Loss:0.10196 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.60s\n", - "Starting Validation.\n", - "Epochs: 35 Examples seen:1746640 Validation Accuracy: 0.9833 Validation Error: 0.0476 Validation Loss: 0.0481 Total time: 190.20min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.8 hours.\n", - "Epochs: 35. Working time: 3.17 hours.\n", - "1747280 Examples seen. Accuracy:0.9564 Error: 0.14957 Loss:0.14683 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.73s\n", - "1747920 Examples seen. Accuracy:0.9557 Error: 0.13448 Loss:0.13733 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.68s\n", - "1748560 Examples seen. Accuracy:0.9556 Error: 0.04399 Loss:0.02454 Threads: 8 Forward time: 5.06s Backward time: 3.26s Step time: 3.69s\n", - "1749200 Examples seen. Accuracy:0.9563 Error: 0.13650 Loss:0.13516 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.66s\n", - "1749840 Examples seen. Accuracy:0.9564 Error: 0.11992 Loss:0.11011 Threads: 8 Forward time: 5.03s Backward time: 3.21s Step time: 3.66s\n", - "1750480 Examples seen. Accuracy:0.9562 Error: 0.13378 Loss:0.14236 Threads: 8 Forward time: 5.10s Backward time: 3.24s Step time: 3.82s\n", - "1751120 Examples seen. Accuracy:0.9559 Error: 0.07769 Loss:0.06909 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.63s\n", - "1751760 Examples seen. Accuracy:0.9555 Error: 0.14064 Loss:0.14833 Threads: 8 Forward time: 4.97s Backward time: 3.19s Step time: 3.61s\n", - "1752400 Examples seen. Accuracy:0.9556 Error: 0.18615 Loss:0.25593 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "1753040 Examples seen. Accuracy:0.9567 Error: 0.07653 Loss:0.04897 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.70s\n", - "1753680 Examples seen. 
Accuracy:0.9565 Error: 0.11759 Loss:0.11167 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.61s\n", - "1754320 Examples seen. Accuracy:0.9561 Error: 0.08570 Loss:0.10862 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.61s\n", - "1754960 Examples seen. Accuracy:0.9550 Error: 0.17914 Loss:0.19847 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.62s\n", - "1755600 Examples seen. Accuracy:0.9551 Error: 0.09134 Loss:0.12681 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.62s\n", - "1756240 Examples seen. Accuracy:0.9555 Error: 0.08776 Loss:0.06581 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.63s\n", - "1756880 Examples seen. Accuracy:0.9556 Error: 0.07670 Loss:0.08249 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.64s\n", - "1757520 Examples seen. Accuracy:0.9538 Error: 0.14102 Loss:0.10427 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.61s\n", - "1758160 Examples seen. Accuracy:0.9532 Error: 0.19786 Loss:0.22957 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.61s\n", - "1758800 Examples seen. Accuracy:0.9539 Error: 0.07255 Loss:0.07461 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.63s\n", - "1759440 Examples seen. Accuracy:0.9539 Error: 0.07666 Loss:0.09994 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.63s\n", - "1760080 Examples seen. Accuracy:0.9549 Error: 0.12176 Loss:0.08088 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.65s\n", - "1760720 Examples seen. Accuracy:0.9543 Error: 0.10170 Loss:0.09934 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.61s\n", - "1761360 Examples seen. Accuracy:0.9542 Error: 0.10327 Loss:0.08137 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.64s\n", - "1762000 Examples seen. 
Accuracy:0.9544 Error: 0.09634 Loss:0.14334 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.62s\n", - "1762640 Examples seen. Accuracy:0.9541 Error: 0.12729 Loss:0.11145 Threads: 8 Forward time: 4.92s Backward time: 3.19s Step time: 3.67s\n", - "1763280 Examples seen. Accuracy:0.9551 Error: 0.14049 Loss:0.16942 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.62s\n", - "1763920 Examples seen. Accuracy:0.9539 Error: 0.08826 Loss:0.07529 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.65s\n", - "1764560 Examples seen. Accuracy:0.9541 Error: 0.16872 Loss:0.18725 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.63s\n", - "1765200 Examples seen. Accuracy:0.9546 Error: 0.03819 Loss:0.02531 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.82s\n", - "1765840 Examples seen. Accuracy:0.9549 Error: 0.09602 Loss:0.09509 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.65s\n", - "1766480 Examples seen. Accuracy:0.9561 Error: 0.11040 Loss:0.16706 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.64s\n", - "1767120 Examples seen. Accuracy:0.9566 Error: 0.14064 Loss:0.18925 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.66s\n", - "1767760 Examples seen. Accuracy:0.9559 Error: 0.17949 Loss:0.23182 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.68s\n", - "1768400 Examples seen. Accuracy:0.9551 Error: 0.06496 Loss:0.05117 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.67s\n", - "1769040 Examples seen. Accuracy:0.9542 Error: 0.15961 Loss:0.17385 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.62s\n", - "1769680 Examples seen. Accuracy:0.9538 Error: 0.08835 Loss:0.07721 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.61s\n", - "1770320 Examples seen. 
Accuracy:0.9535 Error: 0.15800 Loss:0.13758 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.62s\n", - "1770960 Examples seen. Accuracy:0.9544 Error: 0.06439 Loss:0.04447 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.63s\n", - "1771600 Examples seen. Accuracy:0.9544 Error: 0.06153 Loss:0.04529 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.62s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1772240 Examples seen. Accuracy:0.9546 Error: 0.08856 Loss:0.05875 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.61s\n", - "1772880 Examples seen. Accuracy:0.9546 Error: 0.11530 Loss:0.17189 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.66s\n", - "1773520 Examples seen. Accuracy:0.9545 Error: 0.08569 Loss:0.12847 Threads: 8 Forward time: 4.88s Backward time: 3.21s Step time: 3.70s\n", - "1774160 Examples seen. Accuracy:0.9552 Error: 0.13763 Loss:0.09882 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.60s\n", - "1774800 Examples seen. Accuracy:0.9545 Error: 0.15904 Loss:0.15691 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.65s\n", - "1775440 Examples seen. Accuracy:0.9547 Error: 0.10099 Loss:0.20617 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.62s\n", - "1776080 Examples seen. Accuracy:0.9555 Error: 0.08378 Loss:0.05610 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.61s\n", - "1776720 Examples seen. Accuracy:0.9565 Error: 0.15303 Loss:0.18980 Threads: 8 Forward time: 4.90s Backward time: 3.22s Step time: 3.60s\n", - "1777360 Examples seen. Accuracy:0.9561 Error: 0.08778 Loss:0.05999 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.60s\n", - "1778000 Examples seen. Accuracy:0.9563 Error: 0.10252 Loss:0.19915 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.62s\n", - "1778640 Examples seen. 
Accuracy:0.9563 Error: 0.10759 Loss:0.11619 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "1779280 Examples seen. Accuracy:0.9564 Error: 0.10677 Loss:0.06706 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.63s\n", - "1779920 Examples seen. Accuracy:0.9573 Error: 0.03707 Loss:0.02065 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.61s\n", - "1780560 Examples seen. Accuracy:0.9578 Error: 0.06614 Loss:0.06260 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.61s\n", - "1781200 Examples seen. Accuracy:0.9575 Error: 0.14578 Loss:0.15337 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.62s\n", - "1781840 Examples seen. Accuracy:0.9564 Error: 0.14160 Loss:0.11122 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.62s\n", - "1782480 Examples seen. Accuracy:0.9557 Error: 0.18747 Loss:0.18676 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.62s\n", - "1783120 Examples seen. Accuracy:0.9561 Error: 0.07407 Loss:0.05903 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.61s\n", - "1783760 Examples seen. Accuracy:0.9572 Error: 0.07691 Loss:0.14365 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.68s\n", - "1784400 Examples seen. Accuracy:0.9573 Error: 0.21152 Loss:0.29385 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.69s\n", - "1785040 Examples seen. Accuracy:0.9568 Error: 0.14541 Loss:0.23907 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "1785680 Examples seen. Accuracy:0.9561 Error: 0.09648 Loss:0.08535 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.65s\n", - "1786320 Examples seen. Accuracy:0.9560 Error: 0.07973 Loss:0.06297 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.67s\n", - "1786960 Examples seen. 
Accuracy:0.9550 Error: 0.14013 Loss:0.14395 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.69s\n", - "1787600 Examples seen. Accuracy:0.9557 Error: 0.09567 Loss:0.06545 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.63s\n", - "1788240 Examples seen. Accuracy:0.9554 Error: 0.05980 Loss:0.03617 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.65s\n", - "1788880 Examples seen. Accuracy:0.9549 Error: 0.07754 Loss:0.08508 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.65s\n", - "1789520 Examples seen. Accuracy:0.9551 Error: 0.05611 Loss:0.04648 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.65s\n", - "1790160 Examples seen. Accuracy:0.9543 Error: 0.17148 Loss:0.23611 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.73s\n", - "1790800 Examples seen. Accuracy:0.9545 Error: 0.07240 Loss:0.05229 Threads: 8 Forward time: 5.06s Backward time: 3.23s Step time: 3.75s\n", - "1791440 Examples seen. Accuracy:0.9560 Error: 0.17142 Loss:0.22017 Threads: 8 Forward time: 5.06s Backward time: 3.23s Step time: 3.81s\n", - "1792080 Examples seen. Accuracy:0.9574 Error: 0.14793 Loss:0.10845 Threads: 8 Forward time: 5.07s Backward time: 3.23s Step time: 3.69s\n", - "1792720 Examples seen. Accuracy:0.9576 Error: 0.07952 Loss:0.06775 Threads: 8 Forward time: 5.06s Backward time: 3.25s Step time: 3.66s\n", - "1793360 Examples seen. Accuracy:0.9568 Error: 0.19621 Loss:0.21966 Threads: 8 Forward time: 5.03s Backward time: 3.21s Step time: 3.64s\n", - "1794000 Examples seen. Accuracy:0.9568 Error: 0.09204 Loss:0.13662 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "1794640 Examples seen. Accuracy:0.9559 Error: 0.14270 Loss:0.12991 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.64s\n", - "1795280 Examples seen. 
Accuracy:0.9555 Error: 0.06584 Loss:0.08834 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "1795920 Examples seen. Accuracy:0.9565 Error: 0.08207 Loss:0.06582 Threads: 8 Forward time: 4.93s Backward time: 3.20s Step time: 3.60s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 36 Examples seen:1796544 Validation Accuracy: 0.9841 Validation Error: 0.0469 Validation Loss: 0.0469 Total time: 195.42min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.8 hours.\n", - "Epochs: 36. Working time: 3.26 hours.\n", - "1797184 Examples seen. Accuracy:0.9564 Error: 0.11982 Loss:0.12715 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.78s\n", - "1797824 Examples seen. Accuracy:0.9573 Error: 0.06619 Loss:0.04556 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.66s\n", - "1798464 Examples seen. Accuracy:0.9571 Error: 0.18513 Loss:0.17599 Threads: 8 Forward time: 5.05s Backward time: 3.22s Step time: 3.65s\n", - "1799104 Examples seen. Accuracy:0.9559 Error: 0.15037 Loss:0.23883 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.70s\n", - "1799744 Examples seen. Accuracy:0.9545 Error: 0.10704 Loss:0.12288 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.70s\n", - "1800384 Examples seen. Accuracy:0.9549 Error: 0.12290 Loss:0.14029 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.64s\n", - "1801024 Examples seen. Accuracy:0.9547 Error: 0.09161 Loss:0.06599 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.67s\n", - "1801664 Examples seen. Accuracy:0.9550 Error: 0.07423 Loss:0.08698 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.65s\n", - "1802304 Examples seen. Accuracy:0.9547 Error: 0.13005 Loss:0.10568 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.70s\n", - "1802944 Examples seen. 
Accuracy:0.9549 Error: 0.23751 Loss:0.30969 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.68s\n", - "1803584 Examples seen. Accuracy:0.9554 Error: 0.16388 Loss:0.16303 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.67s\n", - "1804224 Examples seen. Accuracy:0.9546 Error: 0.11821 Loss:0.08422 Threads: 8 Forward time: 5.05s Backward time: 3.28s Step time: 3.76s\n", - "1804864 Examples seen. Accuracy:0.9531 Error: 0.14995 Loss:0.14358 Threads: 8 Forward time: 5.16s Backward time: 3.30s Step time: 3.82s\n", - "1805504 Examples seen. Accuracy:0.9529 Error: 0.08595 Loss:0.07757 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.78s\n", - "1806144 Examples seen. Accuracy:0.9524 Error: 0.16161 Loss:0.17668 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.74s\n", - "1806784 Examples seen. Accuracy:0.9526 Error: 0.06927 Loss:0.04835 Threads: 8 Forward time: 5.02s Backward time: 3.20s Step time: 3.71s\n", - "1807424 Examples seen. Accuracy:0.9539 Error: 0.07400 Loss:0.04514 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.67s\n", - "1808064 Examples seen. Accuracy:0.9542 Error: 0.07363 Loss:0.06349 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.74s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1808704 Examples seen. Accuracy:0.9546 Error: 0.04456 Loss:0.02821 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.70s\n", - "1809344 Examples seen. Accuracy:0.9550 Error: 0.11859 Loss:0.11527 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.68s\n", - "1809984 Examples seen. Accuracy:0.9552 Error: 0.09323 Loss:0.06274 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.68s\n", - "1810624 Examples seen. Accuracy:0.9536 Error: 0.09031 Loss:0.08418 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.72s\n", - "1811264 Examples seen. 
Accuracy:0.9541 Error: 0.09636 Loss:0.09265 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.75s\n", - "1811904 Examples seen. Accuracy:0.9546 Error: 0.08280 Loss:0.06800 Threads: 8 Forward time: 5.08s Backward time: 3.25s Step time: 3.81s\n", - "1812544 Examples seen. Accuracy:0.9551 Error: 0.03604 Loss:0.02002 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.70s\n", - "1813184 Examples seen. Accuracy:0.9557 Error: 0.05730 Loss:0.03895 Threads: 8 Forward time: 5.19s Backward time: 3.27s Step time: 3.80s\n", - "1813824 Examples seen. Accuracy:0.9549 Error: 0.13987 Loss:0.20327 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.77s\n", - "1814464 Examples seen. Accuracy:0.9551 Error: 0.12910 Loss:0.13366 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.70s\n", - "1815104 Examples seen. Accuracy:0.9548 Error: 0.10973 Loss:0.19730 Threads: 8 Forward time: 5.01s Backward time: 3.19s Step time: 3.69s\n", - "1815744 Examples seen. Accuracy:0.9545 Error: 0.20201 Loss:0.26006 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.69s\n", - "1816384 Examples seen. Accuracy:0.9554 Error: 0.06838 Loss:0.09450 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.71s\n", - "1817024 Examples seen. Accuracy:0.9556 Error: 0.09218 Loss:0.14568 Threads: 8 Forward time: 5.16s Backward time: 3.33s Step time: 3.74s\n", - "1817664 Examples seen. Accuracy:0.9565 Error: 0.10247 Loss:0.08514 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.72s\n", - "1818304 Examples seen. Accuracy:0.9568 Error: 0.09302 Loss:0.07729 Threads: 8 Forward time: 5.06s Backward time: 3.24s Step time: 3.71s\n", - "1818944 Examples seen. Accuracy:0.9556 Error: 0.10434 Loss:0.10841 Threads: 8 Forward time: 5.16s Backward time: 3.30s Step time: 3.81s\n", - "1819584 Examples seen. 
Accuracy:0.9547 Error: 0.15566 Loss:0.17934 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.67s\n", - "1820224 Examples seen. Accuracy:0.9534 Error: 0.09787 Loss:0.09881 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.68s\n", - "1820864 Examples seen. Accuracy:0.9548 Error: 0.09613 Loss:0.08051 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.73s\n", - "1821504 Examples seen. Accuracy:0.9548 Error: 0.10382 Loss:0.17015 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.62s\n", - "1822144 Examples seen. Accuracy:0.9555 Error: 0.08964 Loss:0.10181 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.64s\n", - "1822784 Examples seen. Accuracy:0.9545 Error: 0.14913 Loss:0.18329 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.61s\n", - "1823424 Examples seen. Accuracy:0.9543 Error: 0.12083 Loss:0.09757 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.62s\n", - "1824064 Examples seen. Accuracy:0.9546 Error: 0.10317 Loss:0.09144 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.62s\n", - "1824704 Examples seen. Accuracy:0.9549 Error: 0.13307 Loss:0.14256 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.60s\n", - "1825344 Examples seen. Accuracy:0.9548 Error: 0.06797 Loss:0.07932 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.62s\n", - "1825984 Examples seen. Accuracy:0.9549 Error: 0.10097 Loss:0.08659 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.61s\n", - "1826624 Examples seen. Accuracy:0.9549 Error: 0.17762 Loss:0.19080 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.61s\n", - "1827264 Examples seen. Accuracy:0.9541 Error: 0.15394 Loss:0.11577 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "1827904 Examples seen. 
Accuracy:0.9540 Error: 0.15268 Loss:0.13558 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.67s\n", - "1828544 Examples seen. Accuracy:0.9533 Error: 0.11208 Loss:0.11845 Threads: 8 Forward time: 5.09s Backward time: 3.27s Step time: 3.85s\n", - "1829184 Examples seen. Accuracy:0.9534 Error: 0.12593 Loss:0.12251 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.69s\n", - "1829824 Examples seen. Accuracy:0.9540 Error: 0.11013 Loss:0.07358 Threads: 8 Forward time: 5.09s Backward time: 3.23s Step time: 3.73s\n", - "1830464 Examples seen. Accuracy:0.9543 Error: 0.15936 Loss:0.15270 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.76s\n", - "1831104 Examples seen. Accuracy:0.9548 Error: 0.09720 Loss:0.11979 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.76s\n", - "1831744 Examples seen. Accuracy:0.9542 Error: 0.09560 Loss:0.08874 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.64s\n", - "1832384 Examples seen. Accuracy:0.9550 Error: 0.04208 Loss:0.02491 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.85s\n", - "1833024 Examples seen. Accuracy:0.9549 Error: 0.15678 Loss:0.17918 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.70s\n", - "1833664 Examples seen. Accuracy:0.9546 Error: 0.03412 Loss:0.01928 Threads: 8 Forward time: 5.26s Backward time: 3.34s Step time: 3.78s\n", - "1834304 Examples seen. Accuracy:0.9548 Error: 0.15983 Loss:0.18012 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.72s\n", - "1834944 Examples seen. Accuracy:0.9550 Error: 0.07554 Loss:0.08495 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.63s\n", - "1835584 Examples seen. Accuracy:0.9569 Error: 0.21244 Loss:0.28960 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.62s\n", - "1836224 Examples seen. 
Accuracy:0.9571 Error: 0.18464 Loss:0.29235 Threads: 8 Forward time: 4.93s Backward time: 3.20s Step time: 3.62s\n", - "1836864 Examples seen. Accuracy:0.9554 Error: 0.14169 Loss:0.16778 Threads: 8 Forward time: 4.94s Backward time: 3.20s Step time: 3.67s\n", - "1837504 Examples seen. Accuracy:0.9545 Error: 0.12011 Loss:0.13473 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.70s\n", - "1838144 Examples seen. Accuracy:0.9538 Error: 0.12203 Loss:0.12402 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.65s\n", - "1838784 Examples seen. Accuracy:0.9543 Error: 0.10910 Loss:0.07847 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.62s\n", - "1839424 Examples seen. Accuracy:0.9547 Error: 0.05406 Loss:0.03135 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.61s\n", - "1840064 Examples seen. Accuracy:0.9546 Error: 0.09812 Loss:0.07594 Threads: 8 Forward time: 5.02s Backward time: 3.19s Step time: 3.61s\n", - "1840704 Examples seen. Accuracy:0.9543 Error: 0.13101 Loss:0.13506 Threads: 8 Forward time: 5.07s Backward time: 3.21s Step time: 3.68s\n", - "1841344 Examples seen. Accuracy:0.9551 Error: 0.14208 Loss:0.13934 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.68s\n", - "1841984 Examples seen. Accuracy:0.9564 Error: 0.18013 Loss:0.20877 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.72s\n", - "1842624 Examples seen. Accuracy:0.9565 Error: 0.06567 Loss:0.05223 Threads: 8 Forward time: 4.92s Backward time: 3.17s Step time: 3.60s\n", - "1843264 Examples seen. Accuracy:0.9563 Error: 0.06501 Loss:0.08944 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.64s\n", - "1843904 Examples seen. Accuracy:0.9570 Error: 0.09017 Loss:0.07286 Threads: 8 Forward time: 4.95s Backward time: 3.19s Step time: 3.61s\n", - "1844544 Examples seen. 
Accuracy:0.9574 Error: 0.11758 Loss:0.14370 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.62s\n", - "1845184 Examples seen. Accuracy:0.9570 Error: 0.09450 Loss:0.08837 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.62s\n", - "1845824 Examples seen. Accuracy:0.9568 Error: 0.08396 Loss:0.09636 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.66s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Starting Validation.\n", - "Epochs: 37 Examples seen:1846448 Validation Accuracy: 0.9841 Validation Error: 0.0461 Validation Loss: 0.0458 Total time: 200.69min\n", - "Epoch time: 4.8 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 37. Working time: 3.34 hours.\n", - "1847088 Examples seen. Accuracy:0.9562 Error: 0.11819 Loss:0.10164 Threads: 8 Forward time: 5.09s Backward time: 3.23s Step time: 3.93s\n", - "1847728 Examples seen. Accuracy:0.9574 Error: 0.10055 Loss:0.09452 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.64s\n", - "1848368 Examples seen. Accuracy:0.9561 Error: 0.09441 Loss:0.12030 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.63s\n", - "1849008 Examples seen. Accuracy:0.9565 Error: 0.08285 Loss:0.09775 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "1849648 Examples seen. Accuracy:0.9560 Error: 0.11474 Loss:0.07778 Threads: 8 Forward time: 4.95s Backward time: 3.18s Step time: 3.66s\n", - "1850288 Examples seen. Accuracy:0.9552 Error: 0.13941 Loss:0.16255 Threads: 8 Forward time: 5.02s Backward time: 3.17s Step time: 3.67s\n", - "1850928 Examples seen. Accuracy:0.9540 Error: 0.16867 Loss:0.21479 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.70s\n", - "1851568 Examples seen. Accuracy:0.9552 Error: 0.06381 Loss:0.05751 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.68s\n", - "1852208 Examples seen. 
Accuracy:0.9542 Error: 0.16486 Loss:0.15809 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.67s\n", - "1852848 Examples seen. Accuracy:0.9545 Error: 0.13323 Loss:0.13841 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.68s\n", - "1853488 Examples seen. Accuracy:0.9561 Error: 0.15190 Loss:0.13007 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.71s\n", - "1854128 Examples seen. Accuracy:0.9550 Error: 0.09864 Loss:0.09541 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.69s\n", - "1854768 Examples seen. Accuracy:0.9544 Error: 0.10317 Loss:0.09194 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.69s\n", - "1855408 Examples seen. Accuracy:0.9553 Error: 0.08703 Loss:0.08090 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.70s\n", - "1856048 Examples seen. Accuracy:0.9551 Error: 0.08102 Loss:0.05976 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.69s\n", - "1856688 Examples seen. Accuracy:0.9552 Error: 0.17611 Loss:0.20903 Threads: 8 Forward time: 5.07s Backward time: 3.23s Step time: 3.73s\n", - "1857328 Examples seen. Accuracy:0.9563 Error: 0.11412 Loss:0.19433 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.70s\n", - "1857968 Examples seen. Accuracy:0.9551 Error: 0.12622 Loss:0.10813 Threads: 8 Forward time: 5.07s Backward time: 3.25s Step time: 3.71s\n", - "1858608 Examples seen. Accuracy:0.9548 Error: 0.08798 Loss:0.07007 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.78s\n", - "1859248 Examples seen. Accuracy:0.9554 Error: 0.11709 Loss:0.11175 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.71s\n", - "1859888 Examples seen. Accuracy:0.9565 Error: 0.07906 Loss:0.08237 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.75s\n", - "1860528 Examples seen. 
Accuracy:0.9572 Error: 0.09042 Loss:0.11210 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.68s\n", - "1861168 Examples seen. Accuracy:0.9572 Error: 0.11350 Loss:0.13660 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.67s\n", - "1861808 Examples seen. Accuracy:0.9574 Error: 0.10039 Loss:0.10602 Threads: 8 Forward time: 5.01s Backward time: 3.19s Step time: 3.67s\n", - "1862448 Examples seen. Accuracy:0.9574 Error: 0.07011 Loss:0.07470 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.63s\n", - "1863088 Examples seen. Accuracy:0.9576 Error: 0.00929 Loss:0.00472 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.65s\n", - "1863728 Examples seen. Accuracy:0.9566 Error: 0.12854 Loss:0.09041 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.69s\n", - "1864368 Examples seen. Accuracy:0.9562 Error: 0.08720 Loss:0.08158 Threads: 8 Forward time: 5.05s Backward time: 3.22s Step time: 3.64s\n", - "1865008 Examples seen. Accuracy:0.9568 Error: 0.07177 Loss:0.04339 Threads: 8 Forward time: 5.12s Backward time: 3.26s Step time: 3.65s\n", - "1865648 Examples seen. Accuracy:0.9572 Error: 0.14646 Loss:0.15084 Threads: 8 Forward time: 5.08s Backward time: 3.26s Step time: 3.68s\n", - "1866288 Examples seen. Accuracy:0.9568 Error: 0.09418 Loss:0.11620 Threads: 8 Forward time: 5.63s Backward time: 3.60s Step time: 3.83s\n", - "1866928 Examples seen. Accuracy:0.9583 Error: 0.10999 Loss:0.07401 Threads: 8 Forward time: 5.09s Backward time: 3.25s Step time: 3.66s\n", - "1867568 Examples seen. Accuracy:0.9588 Error: 0.14740 Loss:0.15679 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.72s\n", - "1868208 Examples seen. Accuracy:0.9577 Error: 0.11466 Loss:0.14010 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.75s\n", - "1868848 Examples seen. 
Accuracy:0.9568 Error: 0.21458 Loss:0.22418 Threads: 8 Forward time: 4.97s Backward time: 3.16s Step time: 3.69s\n", - "1869488 Examples seen. Accuracy:0.9578 Error: 0.12067 Loss:0.14627 Threads: 8 Forward time: 4.98s Backward time: 3.18s Step time: 3.66s\n", - "1870128 Examples seen. Accuracy:0.9573 Error: 0.15151 Loss:0.10631 Threads: 8 Forward time: 5.05s Backward time: 3.21s Step time: 3.68s\n", - "1870768 Examples seen. Accuracy:0.9579 Error: 0.14390 Loss:0.16780 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.71s\n", - "1871408 Examples seen. Accuracy:0.9572 Error: 0.08003 Loss:0.05318 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.69s\n", - "1872048 Examples seen. Accuracy:0.9574 Error: 0.06995 Loss:0.05608 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.69s\n", - "1872688 Examples seen. Accuracy:0.9574 Error: 0.12168 Loss:0.13208 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.69s\n", - "1873328 Examples seen. Accuracy:0.9571 Error: 0.12677 Loss:0.12205 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.72s\n", - "1873968 Examples seen. Accuracy:0.9570 Error: 0.21795 Loss:0.28115 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.74s\n", - "1874608 Examples seen. Accuracy:0.9572 Error: 0.07045 Loss:0.04531 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.70s\n", - "1875248 Examples seen. Accuracy:0.9570 Error: 0.14072 Loss:0.14882 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.66s\n", - "1875888 Examples seen. Accuracy:0.9563 Error: 0.01611 Loss:0.00855 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.67s\n", - "1876528 Examples seen. Accuracy:0.9570 Error: 0.16832 Loss:0.25454 Threads: 8 Forward time: 4.99s Backward time: 3.18s Step time: 3.65s\n", - "1877168 Examples seen. 
Accuracy:0.9575 Error: 0.16379 Loss:0.18746 Threads: 8 Forward time: 5.00s Backward time: 3.18s Step time: 3.70s\n", - "1877808 Examples seen. Accuracy:0.9571 Error: 0.13020 Loss:0.19135 Threads: 8 Forward time: 4.97s Backward time: 3.19s Step time: 3.63s\n", - "1878448 Examples seen. Accuracy:0.9567 Error: 0.06094 Loss:0.03615 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.67s\n", - "1879088 Examples seen. Accuracy:0.9560 Error: 0.11889 Loss:0.09509 Threads: 8 Forward time: 5.02s Backward time: 3.20s Step time: 3.69s\n", - "1879728 Examples seen. Accuracy:0.9549 Error: 0.27287 Loss:0.31518 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.67s\n", - "1880368 Examples seen. Accuracy:0.9562 Error: 0.10835 Loss:0.07995 Threads: 8 Forward time: 4.96s Backward time: 3.18s Step time: 3.65s\n", - "1881008 Examples seen. Accuracy:0.9566 Error: 0.09757 Loss:0.09872 Threads: 8 Forward time: 4.98s Backward time: 3.18s Step time: 3.65s\n", - "1881648 Examples seen. Accuracy:0.9569 Error: 0.11494 Loss:0.15117 Threads: 8 Forward time: 5.04s Backward time: 3.20s Step time: 3.68s\n", - "1882288 Examples seen. Accuracy:0.9579 Error: 0.05830 Loss:0.04952 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.71s\n", - "1882928 Examples seen. Accuracy:0.9581 Error: 0.08375 Loss:0.14745 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.65s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1883568 Examples seen. Accuracy:0.9577 Error: 0.09592 Loss:0.11037 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.66s\n", - "1884208 Examples seen. Accuracy:0.9576 Error: 0.16874 Loss:0.19138 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.70s\n", - "1884848 Examples seen. Accuracy:0.9571 Error: 0.06494 Loss:0.04911 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.64s\n", - "1885488 Examples seen. 
Accuracy:0.9567 Error: 0.16075 Loss:0.16678 Threads: 8 Forward time: 5.09s Backward time: 3.27s Step time: 3.69s\n", - "1886128 Examples seen. Accuracy:0.9574 Error: 0.04330 Loss:0.02504 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.75s\n", - "1886768 Examples seen. Accuracy:0.9581 Error: 0.01211 Loss:0.00622 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.67s\n", - "1887408 Examples seen. Accuracy:0.9580 Error: 0.08208 Loss:0.07149 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.65s\n", - "1888048 Examples seen. Accuracy:0.9592 Error: 0.07264 Loss:0.06598 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.65s\n", - "1888688 Examples seen. Accuracy:0.9596 Error: 0.14589 Loss:0.20231 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.67s\n", - "1889328 Examples seen. Accuracy:0.9588 Error: 0.12981 Loss:0.14985 Threads: 8 Forward time: 4.95s Backward time: 3.19s Step time: 3.64s\n", - "1889968 Examples seen. Accuracy:0.9592 Error: 0.10374 Loss:0.14170 Threads: 8 Forward time: 4.93s Backward time: 3.21s Step time: 3.69s\n", - "1890608 Examples seen. Accuracy:0.9592 Error: 0.04690 Loss:0.03925 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.63s\n", - "1891248 Examples seen. Accuracy:0.9580 Error: 0.17447 Loss:0.17345 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.65s\n", - "1891888 Examples seen. Accuracy:0.9584 Error: 0.06886 Loss:0.06462 Threads: 8 Forward time: 5.09s Backward time: 3.23s Step time: 3.67s\n", - "1892528 Examples seen. Accuracy:0.9575 Error: 0.01526 Loss:0.00851 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.69s\n", - "1893168 Examples seen. Accuracy:0.9581 Error: 0.09771 Loss:0.08287 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.63s\n", - "1893808 Examples seen. 
Accuracy:0.9580 Error: 0.07808 Loss:0.10820 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.62s\n", - "1894448 Examples seen. Accuracy:0.9578 Error: 0.15686 Loss:0.15807 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.64s\n", - "1895088 Examples seen. Accuracy:0.9562 Error: 0.21945 Loss:0.23353 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.62s\n", - "1895728 Examples seen. Accuracy:0.9578 Error: 0.07588 Loss:0.07329 Threads: 8 Forward time: 4.91s Backward time: 3.19s Step time: 3.60s\n", - "Starting Validation.\n", - "Epochs: 38 Examples seen:1896352 Validation Accuracy: 0.9830 Validation Error: 0.0451 Validation Loss: 0.0452 Total time: 205.94min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.8 hours.\n", - "Epochs: 38. Working time: 3.43 hours.\n", - "1896992 Examples seen. Accuracy:0.9574 Error: 0.06946 Loss:0.05478 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.64s\n", - "1897632 Examples seen. Accuracy:0.9573 Error: 0.19728 Loss:0.16359 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.60s\n", - "1898272 Examples seen. Accuracy:0.9575 Error: 0.09539 Loss:0.12287 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.63s\n", - "1898912 Examples seen. Accuracy:0.9571 Error: 0.04800 Loss:0.02670 Threads: 8 Forward time: 5.04s Backward time: 3.22s Step time: 3.64s\n", - "1899552 Examples seen. Accuracy:0.9582 Error: 0.08141 Loss:0.05097 Threads: 8 Forward time: 4.93s Backward time: 3.19s Step time: 3.61s\n", - "1900192 Examples seen. Accuracy:0.9575 Error: 0.15912 Loss:0.13423 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.62s\n", - "1900832 Examples seen. Accuracy:0.9576 Error: 0.05816 Loss:0.05678 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.63s\n", - "1901472 Examples seen. Accuracy:0.9561 Error: 0.06190 Loss:0.05640 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.69s\n", - "1902112 Examples seen. 
Accuracy:0.9558 Error: 0.12134 Loss:0.08576 Threads: 8 Forward time: 5.16s Backward time: 3.26s Step time: 3.74s\n", - "1902752 Examples seen. Accuracy:0.9560 Error: 0.07206 Loss:0.04274 Threads: 8 Forward time: 5.10s Backward time: 3.25s Step time: 3.72s\n", - "1903392 Examples seen. Accuracy:0.9547 Error: 0.07398 Loss:0.08184 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.82s\n", - "1904032 Examples seen. Accuracy:0.9545 Error: 0.13930 Loss:0.09375 Threads: 8 Forward time: 5.12s Backward time: 3.27s Step time: 3.84s\n", - "1904672 Examples seen. Accuracy:0.9559 Error: 0.11337 Loss:0.09635 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.71s\n", - "1905312 Examples seen. Accuracy:0.9562 Error: 0.02659 Loss:0.01801 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.69s\n", - "1905952 Examples seen. Accuracy:0.9562 Error: 0.09483 Loss:0.18519 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.62s\n", - "1906592 Examples seen. Accuracy:0.9560 Error: 0.13859 Loss:0.21150 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.65s\n", - "1907232 Examples seen. Accuracy:0.9569 Error: 0.07786 Loss:0.09962 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.63s\n", - "1907872 Examples seen. Accuracy:0.9575 Error: 0.12255 Loss:0.14138 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.63s\n", - "1908512 Examples seen. Accuracy:0.9575 Error: 0.10497 Loss:0.10718 Threads: 8 Forward time: 4.98s Backward time: 3.16s Step time: 3.63s\n", - "1909152 Examples seen. Accuracy:0.9571 Error: 0.07362 Loss:0.05523 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.64s\n", - "1909792 Examples seen. Accuracy:0.9558 Error: 0.23202 Loss:0.31924 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.69s\n", - "1910432 Examples seen. 
Accuracy:0.9543 Error: 0.18658 Loss:0.17026 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.64s\n", - "1911072 Examples seen. Accuracy:0.9530 Error: 0.20928 Loss:0.21472 Threads: 8 Forward time: 5.11s Backward time: 3.19s Step time: 3.73s\n", - "1911712 Examples seen. Accuracy:0.9522 Error: 0.12081 Loss:0.15239 Threads: 8 Forward time: 5.00s Backward time: 3.17s Step time: 3.69s\n", - "1912352 Examples seen. Accuracy:0.9524 Error: 0.07168 Loss:0.04697 Threads: 8 Forward time: 5.00s Backward time: 3.18s Step time: 3.64s\n", - "1912992 Examples seen. Accuracy:0.9532 Error: 0.05348 Loss:0.03226 Threads: 8 Forward time: 5.03s Backward time: 3.20s Step time: 3.65s\n", - "1913632 Examples seen. Accuracy:0.9546 Error: 0.08172 Loss:0.10057 Threads: 8 Forward time: 5.11s Backward time: 3.22s Step time: 3.69s\n", - "1914272 Examples seen. Accuracy:0.9549 Error: 0.10117 Loss:0.10588 Threads: 8 Forward time: 5.13s Backward time: 3.26s Step time: 3.72s\n", - "1914912 Examples seen. Accuracy:0.9567 Error: 0.13670 Loss:0.13147 Threads: 8 Forward time: 4.98s Backward time: 3.19s Step time: 3.67s\n", - "1915552 Examples seen. Accuracy:0.9560 Error: 0.07304 Loss:0.05058 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.69s\n", - "1916192 Examples seen. Accuracy:0.9560 Error: 0.07862 Loss:0.06043 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.67s\n", - "1916832 Examples seen. Accuracy:0.9558 Error: 0.10212 Loss:0.10555 Threads: 8 Forward time: 4.98s Backward time: 3.18s Step time: 3.71s\n", - "1917472 Examples seen. Accuracy:0.9562 Error: 0.06964 Loss:0.05000 Threads: 8 Forward time: 5.03s Backward time: 3.21s Step time: 3.74s\n", - "1918112 Examples seen. Accuracy:0.9572 Error: 0.06620 Loss:0.05128 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.67s\n", - "1918752 Examples seen. 
Accuracy:0.9590 Error: 0.06132 Loss:0.04567 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.66s\n", - "1919392 Examples seen. Accuracy:0.9596 Error: 0.10776 Loss:0.12338 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.67s\n", - "1920032 Examples seen. Accuracy:0.9587 Error: 0.12756 Loss:0.14233 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.65s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1920672 Examples seen. Accuracy:0.9579 Error: 0.16522 Loss:0.20416 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.65s\n", - "1921312 Examples seen. Accuracy:0.9565 Error: 0.13033 Loss:0.15052 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.64s\n", - "1921952 Examples seen. Accuracy:0.9570 Error: 0.07184 Loss:0.04647 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.63s\n", - "1922592 Examples seen. Accuracy:0.9582 Error: 0.08658 Loss:0.15437 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.63s\n", - "1923232 Examples seen. Accuracy:0.9582 Error: 0.11232 Loss:0.12390 Threads: 8 Forward time: 5.02s Backward time: 3.20s Step time: 3.64s\n", - "1923872 Examples seen. Accuracy:0.9580 Error: 0.14932 Loss:0.13402 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.61s\n", - "1924512 Examples seen. Accuracy:0.9577 Error: 0.02818 Loss:0.01480 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.62s\n", - "1925152 Examples seen. Accuracy:0.9575 Error: 0.03215 Loss:0.01707 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.65s\n", - "1925792 Examples seen. Accuracy:0.9569 Error: 0.09871 Loss:0.08763 Threads: 8 Forward time: 5.09s Backward time: 3.22s Step time: 3.63s\n", - "1926432 Examples seen. Accuracy:0.9567 Error: 0.18634 Loss:0.24356 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.65s\n", - "1927072 Examples seen. 
Accuracy:0.9566 Error: 0.13298 Loss:0.11218 Threads: 8 Forward time: 4.98s Backward time: 3.18s Step time: 3.66s\n", - "1927712 Examples seen. Accuracy:0.9557 Error: 0.12890 Loss:0.11758 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "1928352 Examples seen. Accuracy:0.9558 Error: 0.16920 Loss:0.15208 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.64s\n", - "1928992 Examples seen. Accuracy:0.9550 Error: 0.10400 Loss:0.16673 Threads: 8 Forward time: 5.07s Backward time: 3.21s Step time: 3.64s\n", - "1929632 Examples seen. Accuracy:0.9551 Error: 0.16371 Loss:0.15684 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.63s\n", - "1930272 Examples seen. Accuracy:0.9549 Error: 0.13895 Loss:0.11924 Threads: 8 Forward time: 4.98s Backward time: 3.18s Step time: 3.65s\n", - "1930912 Examples seen. Accuracy:0.9553 Error: 0.12780 Loss:0.08297 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.66s\n", - "1931552 Examples seen. Accuracy:0.9566 Error: 0.03757 Loss:0.02385 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.65s\n", - "1932192 Examples seen. Accuracy:0.9576 Error: 0.11039 Loss:0.11438 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.67s\n", - "1932832 Examples seen. Accuracy:0.9572 Error: 0.12712 Loss:0.15825 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.67s\n", - "1933472 Examples seen. Accuracy:0.9571 Error: 0.11167 Loss:0.12649 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.64s\n", - "1934112 Examples seen. Accuracy:0.9571 Error: 0.07529 Loss:0.05379 Threads: 8 Forward time: 4.97s Backward time: 3.19s Step time: 3.63s\n", - "1934752 Examples seen. Accuracy:0.9574 Error: 0.05598 Loss:0.03662 Threads: 8 Forward time: 4.95s Backward time: 3.19s Step time: 3.62s\n", - "1935392 Examples seen. 
Accuracy:0.9576 Error: 0.06798 Loss:0.07322 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.62s\n", - "1936032 Examples seen. Accuracy:0.9570 Error: 0.09908 Loss:0.08005 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.67s\n", - "1936672 Examples seen. Accuracy:0.9558 Error: 0.07398 Loss:0.05030 Threads: 8 Forward time: 4.96s Backward time: 3.18s Step time: 3.65s\n", - "1937312 Examples seen. Accuracy:0.9546 Error: 0.08393 Loss:0.07516 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.63s\n", - "1937952 Examples seen. Accuracy:0.9545 Error: 0.09262 Loss:0.08663 Threads: 8 Forward time: 4.98s Backward time: 3.19s Step time: 3.64s\n", - "1938592 Examples seen. Accuracy:0.9553 Error: 0.09247 Loss:0.08795 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "1939232 Examples seen. Accuracy:0.9556 Error: 0.15727 Loss:0.15402 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.64s\n", - "1939872 Examples seen. Accuracy:0.9557 Error: 0.13277 Loss:0.12644 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.64s\n", - "1940512 Examples seen. Accuracy:0.9551 Error: 0.13032 Loss:0.11599 Threads: 8 Forward time: 4.98s Backward time: 3.19s Step time: 3.62s\n", - "1941152 Examples seen. Accuracy:0.9545 Error: 0.13947 Loss:0.12609 Threads: 8 Forward time: 4.94s Backward time: 3.14s Step time: 3.61s\n", - "1941792 Examples seen. Accuracy:0.9551 Error: 0.10305 Loss:0.07518 Threads: 8 Forward time: 4.90s Backward time: 3.17s Step time: 3.60s\n", - "1942432 Examples seen. Accuracy:0.9545 Error: 0.11349 Loss:0.10990 Threads: 8 Forward time: 5.02s Backward time: 3.18s Step time: 3.69s\n", - "1943072 Examples seen. Accuracy:0.9556 Error: 0.09067 Loss:0.11182 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.64s\n", - "1943712 Examples seen. 
Accuracy:0.9552 Error: 0.10892 Loss:0.09714 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.64s\n", - "1944352 Examples seen. Accuracy:0.9562 Error: 0.07942 Loss:0.06084 Threads: 8 Forward time: 5.08s Backward time: 3.24s Step time: 3.65s\n", - "1944992 Examples seen. Accuracy:0.9557 Error: 0.02260 Loss:0.01171 Threads: 8 Forward time: 5.10s Backward time: 3.25s Step time: 3.68s\n", - "1945632 Examples seen. Accuracy:0.9556 Error: 0.12798 Loss:0.20617 Threads: 8 Forward time: 5.08s Backward time: 3.23s Step time: 3.81s\n", - "Starting Validation.\n", - "Epochs: 39 Examples seen:1946256 Validation Accuracy: 0.9833 Validation Error: 0.0440 Validation Loss: 0.0443 Total time: 211.17min\n", - "Epoch time: 5 minutes. 100 epochs: 8.3 hours.\n", - "Epochs: 39. Working time: 3.52 hours.\n", - "1946896 Examples seen. Accuracy:0.9548 Error: 0.07596 Loss:0.06946 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.78s\n", - "1947536 Examples seen. Accuracy:0.9547 Error: 0.03122 Loss:0.01731 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.70s\n", - "1948176 Examples seen. Accuracy:0.9543 Error: 0.08073 Loss:0.05983 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.66s\n", - "1948816 Examples seen. Accuracy:0.9541 Error: 0.05202 Loss:0.03001 Threads: 8 Forward time: 4.95s Backward time: 3.18s Step time: 3.62s\n", - "1949456 Examples seen. Accuracy:0.9559 Error: 0.01688 Loss:0.00939 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.67s\n", - "1950096 Examples seen. Accuracy:0.9570 Error: 0.12699 Loss:0.11698 Threads: 8 Forward time: 5.05s Backward time: 3.22s Step time: 3.66s\n", - "1950736 Examples seen. Accuracy:0.9560 Error: 0.11172 Loss:0.07707 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.65s\n", - "1951376 Examples seen. Accuracy:0.9562 Error: 0.11830 Loss:0.09813 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.65s\n", - "1952016 Examples seen. 
Accuracy:0.9566 Error: 0.07522 Loss:0.05643 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.65s\n", - "1952656 Examples seen. Accuracy:0.9567 Error: 0.09331 Loss:0.07104 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.69s\n", - "1953296 Examples seen. Accuracy:0.9572 Error: 0.16513 Loss:0.15516 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.71s\n", - "1953936 Examples seen. Accuracy:0.9578 Error: 0.12486 Loss:0.16671 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.70s\n", - "1954576 Examples seen. Accuracy:0.9589 Error: 0.02811 Loss:0.01570 Threads: 8 Forward time: 5.08s Backward time: 3.22s Step time: 3.76s\n", - "1955216 Examples seen. Accuracy:0.9580 Error: 0.08625 Loss:0.06506 Threads: 8 Forward time: 4.94s Backward time: 3.19s Step time: 3.68s\n", - "1955856 Examples seen. Accuracy:0.9587 Error: 0.10249 Loss:0.07206 Threads: 8 Forward time: 4.99s Backward time: 3.19s Step time: 3.68s\n", - "1956496 Examples seen. Accuracy:0.9573 Error: 0.13614 Loss:0.11562 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.73s\n", - "1957136 Examples seen. Accuracy:0.9578 Error: 0.07779 Loss:0.05110 Threads: 8 Forward time: 5.04s Backward time: 3.20s Step time: 3.68s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1957776 Examples seen. Accuracy:0.9568 Error: 0.23783 Loss:0.28378 Threads: 8 Forward time: 4.98s Backward time: 3.19s Step time: 3.68s\n", - "1958416 Examples seen. Accuracy:0.9557 Error: 0.04999 Loss:0.03311 Threads: 8 Forward time: 4.95s Backward time: 3.18s Step time: 3.70s\n", - "1959056 Examples seen. Accuracy:0.9553 Error: 0.14057 Loss:0.14434 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.72s\n", - "1959696 Examples seen. Accuracy:0.9546 Error: 0.11904 Loss:0.12703 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.69s\n", - "1960336 Examples seen. 
Accuracy:0.9534 Error: 0.07844 Loss:0.07202 Threads: 8 Forward time: 5.06s Backward time: 3.26s Step time: 3.71s\n", - "1960976 Examples seen. Accuracy:0.9535 Error: 0.04302 Loss:0.03824 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.75s\n", - "1961616 Examples seen. Accuracy:0.9549 Error: 0.05744 Loss:0.03402 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.71s\n", - "1962256 Examples seen. Accuracy:0.9554 Error: 0.07823 Loss:0.08531 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.70s\n", - "1962896 Examples seen. Accuracy:0.9546 Error: 0.11356 Loss:0.12044 Threads: 8 Forward time: 5.09s Backward time: 3.27s Step time: 3.67s\n", - "1963536 Examples seen. Accuracy:0.9541 Error: 0.12428 Loss:0.11704 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.66s\n", - "1964176 Examples seen. Accuracy:0.9549 Error: 0.07645 Loss:0.05267 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.66s\n", - "1964816 Examples seen. Accuracy:0.9552 Error: 0.15385 Loss:0.17721 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.66s\n", - "1965456 Examples seen. Accuracy:0.9547 Error: 0.18578 Loss:0.31996 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.67s\n", - "1966096 Examples seen. Accuracy:0.9539 Error: 0.07841 Loss:0.06008 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.65s\n", - "1966736 Examples seen. Accuracy:0.9542 Error: 0.13138 Loss:0.14388 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.64s\n", - "1967376 Examples seen. Accuracy:0.9552 Error: 0.07811 Loss:0.06564 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.65s\n", - "1968016 Examples seen. Accuracy:0.9547 Error: 0.06264 Loss:0.04557 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.63s\n", - "1968656 Examples seen. 
Accuracy:0.9550 Error: 0.15017 Loss:0.13880 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.63s\n", - "1969296 Examples seen. Accuracy:0.9560 Error: 0.16495 Loss:0.30835 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.66s\n", - "1969936 Examples seen. Accuracy:0.9562 Error: 0.15374 Loss:0.14266 Threads: 8 Forward time: 5.05s Backward time: 3.24s Step time: 3.66s\n", - "1970576 Examples seen. Accuracy:0.9570 Error: 0.16502 Loss:0.24308 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.65s\n", - "1971216 Examples seen. Accuracy:0.9572 Error: 0.12129 Loss:0.13275 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.65s\n", - "1971856 Examples seen. Accuracy:0.9568 Error: 0.11641 Loss:0.14385 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.64s\n", - "1972496 Examples seen. Accuracy:0.9572 Error: 0.11184 Loss:0.08962 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.66s\n", - "1973136 Examples seen. Accuracy:0.9574 Error: 0.13011 Loss:0.12522 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.67s\n", - "1973776 Examples seen. Accuracy:0.9578 Error: 0.13604 Loss:0.12373 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.67s\n", - "1974416 Examples seen. Accuracy:0.9558 Error: 0.20894 Loss:0.21032 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.68s\n", - "1975056 Examples seen. Accuracy:0.9551 Error: 0.08364 Loss:0.06744 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "1975696 Examples seen. Accuracy:0.9564 Error: 0.08421 Loss:0.10634 Threads: 8 Forward time: 5.06s Backward time: 3.26s Step time: 3.71s\n", - "1976336 Examples seen. Accuracy:0.9566 Error: 0.04915 Loss:0.03122 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.67s\n", - "1976976 Examples seen. 
Accuracy:0.9555 Error: 0.08979 Loss:0.21834 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.65s\n", - "1977616 Examples seen. Accuracy:0.9564 Error: 0.08140 Loss:0.08624 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "1978256 Examples seen. Accuracy:0.9575 Error: 0.03626 Loss:0.02001 Threads: 8 Forward time: 5.07s Backward time: 3.27s Step time: 3.68s\n", - "1978896 Examples seen. Accuracy:0.9575 Error: 0.11398 Loss:0.14554 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.64s\n", - "1979536 Examples seen. Accuracy:0.9568 Error: 0.11191 Loss:0.08264 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.67s\n", - "1980176 Examples seen. Accuracy:0.9575 Error: 0.04127 Loss:0.02301 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.67s\n", - "1980816 Examples seen. Accuracy:0.9566 Error: 0.10878 Loss:0.14415 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.67s\n", - "1981456 Examples seen. Accuracy:0.9571 Error: 0.17045 Loss:0.24951 Threads: 8 Forward time: 5.10s Backward time: 3.21s Step time: 3.70s\n", - "1982096 Examples seen. Accuracy:0.9572 Error: 0.11920 Loss:0.13092 Threads: 8 Forward time: 5.10s Backward time: 3.22s Step time: 3.69s\n", - "1982736 Examples seen. Accuracy:0.9563 Error: 0.11615 Loss:0.12482 Threads: 8 Forward time: 5.12s Backward time: 3.28s Step time: 3.70s\n", - "1983376 Examples seen. Accuracy:0.9565 Error: 0.11549 Loss:0.10239 Threads: 8 Forward time: 5.09s Backward time: 3.29s Step time: 3.71s\n", - "1984016 Examples seen. Accuracy:0.9550 Error: 0.16654 Loss:0.23019 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.67s\n", - "1984656 Examples seen. Accuracy:0.9551 Error: 0.08989 Loss:0.05566 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.71s\n", - "1985296 Examples seen. 
Accuracy:0.9553 Error: 0.12084 Loss:0.17785 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.69s\n", - "1985936 Examples seen. Accuracy:0.9548 Error: 0.13153 Loss:0.12217 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.69s\n", - "1986576 Examples seen. Accuracy:0.9553 Error: 0.19813 Loss:0.16785 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.71s\n", - "1987216 Examples seen. Accuracy:0.9565 Error: 0.05785 Loss:0.03622 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.67s\n", - "1987856 Examples seen. Accuracy:0.9557 Error: 0.09124 Loss:0.07761 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.67s\n", - "1988496 Examples seen. Accuracy:0.9537 Error: 0.16184 Loss:0.15157 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.68s\n", - "1989136 Examples seen. Accuracy:0.9538 Error: 0.14168 Loss:0.16176 Threads: 8 Forward time: 5.05s Backward time: 3.22s Step time: 3.69s\n", - "1989776 Examples seen. Accuracy:0.9537 Error: 0.09993 Loss:0.08669 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.67s\n", - "1990416 Examples seen. Accuracy:0.9548 Error: 0.13575 Loss:0.14654 Threads: 8 Forward time: 5.04s Backward time: 3.21s Step time: 3.67s\n", - "1991056 Examples seen. Accuracy:0.9562 Error: 0.12144 Loss:0.12310 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.67s\n", - "1991696 Examples seen. Accuracy:0.9563 Error: 0.06950 Loss:0.10158 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.71s\n", - "1992336 Examples seen. Accuracy:0.9563 Error: 0.11919 Loss:0.11302 Threads: 8 Forward time: 5.06s Backward time: 3.23s Step time: 3.74s\n", - "1992976 Examples seen. Accuracy:0.9565 Error: 0.10072 Loss:0.09323 Threads: 8 Forward time: 5.07s Backward time: 3.23s Step time: 3.74s\n", - "1993616 Examples seen. 
Accuracy:0.9569 Error: 0.05778 Loss:0.03738 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.68s\n", - "1994256 Examples seen. Accuracy:0.9577 Error: 0.11338 Loss:0.17860 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.66s\n", - "1994896 Examples seen. Accuracy:0.9570 Error: 0.09030 Loss:0.06168 Threads: 8 Forward time: 5.06s Backward time: 3.22s Step time: 3.67s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1995536 Examples seen. Accuracy:0.9584 Error: 0.15258 Loss:0.10264 Threads: 8 Forward time: 5.01s Backward time: 3.18s Step time: 3.66s\n", - "Starting Validation.\n", - "Epochs: 40 Examples seen:1996160 Validation Accuracy: 0.9833 Validation Error: 0.0438 Validation Loss: 0.0430 Total time: 216.43min\n", - "Layer 0 Max Output: 1.266 Min Output: -2.000 TNNetInput 128,128,3 Times: 0.00s 0.00s\n", - "Layer 1 Neurons: 64 Max Weight: 0.454 Min Weight: -0.354 Max Output: 6.081 Min Output: -5.892 TNNetConvolutionLinear 66,66,64 Times: 8.56s 0.40s Parent:0\n", - "Layer 2 Max Output: 6.081 Min Output: -3.439 TNNetMaxPool 33,33,64 Times: 3.62s 0.06s Parent:1\n", - "Layer 3 Neurons: 1 Max Weight: 0.640 Min Weight: 0.237 Max Output: 9.128 Min Output: -5.741 TNNetMovingStdNormalization 33,33,64 Times: 0.30s 0.00s Parent:2\n", - "Layer 4 Neurons: 64 Max Weight: 0.415 Min Weight: -0.223 Max Output: 11.363 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.83s 0.51s Parent:3\n", - "Layer 5 Neurons: 64 Max Weight: 0.385 Min Weight: -0.340 Max Output: 11.486 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.85s 0.18s Parent:4\n", - "Layer 6 Max Output: 11.486 Min Output: 0.000 TNNetMaxPool 17,17,64 Times: 0.50s 0.02s Parent:5\n", - "Layer 7 Neurons: 64 Max Weight: 0.435 Min Weight: -0.271 Max Output: 8.026 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.42s 0.09s Parent:6\n", - "Layer 8 Neurons: 64 Max Weight: 0.271 Min Weight: -0.236 Max Output: 6.715 Min Output: 0.000 TNNetConvolutionReLU 
17,17,64 Times: 1.49s 0.09s Parent:7\n", - "Layer 9 Neurons: 64 Max Weight: 0.244 Min Weight: -0.216 Max Output: 12.481 Min Output: 0.000 TNNetConvolutionReLU 9,9,64 Times: 0.44s 0.02s Parent:8\n", - "Layer 10 Max Output: 12.481 Min Output: 0.000 TNNetDropout 9,9,64 Times: 0.01s 0.00s Parent:9\n", - "Layer 11 Max Output: 12.481 Min Output: 0.000 TNNetMaxPool 5,5,64 Times: 0.04s 0.00s Parent:10\n", - "Layer 12 Neurons: 39 Max Weight: 0.387 Min Weight: -0.402 Max Output: 36.226 Min Output: -16.666 TNNetFullConnectLinear 39,1,1 Times: 0.03s 0.00s Parent:11\n", - "Layer 13 Max Output: 1.000 Min Output: 0.000 TNNetSoftMax 39,1,1 Times: 0.00s 0.00s Parent:12\n", - "Starting Testing.\n", - "Epochs: 40 Examples seen:1996160 Test Accuracy: 0.9877 Test Error: 0.0364 Test Loss: 0.0344 Total time: 216.91min\n", - "Epoch time: 4.8 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 40. Working time: 3.62 hours.\n", - "Learning rate set to:0.00067\n", - "1996800 Examples seen. Accuracy:0.9603 Error: 0.01858 Loss:0.01055 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.79s\n", - "1997440 Examples seen. Accuracy:0.9612 Error: 0.06751 Loss:0.04420 Threads: 8 Forward time: 5.05s Backward time: 3.20s Step time: 3.70s\n", - "1998080 Examples seen. Accuracy:0.9634 Error: 0.05969 Loss:0.03811 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.66s\n", - "1998720 Examples seen. Accuracy:0.9636 Error: 0.06211 Loss:0.05980 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.66s\n", - "1999360 Examples seen. Accuracy:0.9627 Error: 0.19253 Loss:0.23533 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.79s\n", - "2000000 Examples seen. Accuracy:0.9621 Error: 0.05609 Loss:0.08227 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.69s\n", - "2000640 Examples seen. Accuracy:0.9602 Error: 0.09609 Loss:0.25120 Threads: 8 Forward time: 5.03s Backward time: 3.21s Step time: 3.69s\n", - "2001280 Examples seen. 
Accuracy:0.9606 Error: 0.10851 Loss:0.09629 Threads: 8 Forward time: 5.05s Backward time: 3.26s Step time: 3.86s\n", - "2001920 Examples seen. Accuracy:0.9596 Error: 0.09719 Loss:0.07179 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.70s\n", - "2002560 Examples seen. Accuracy:0.9606 Error: 0.17319 Loss:0.14447 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.72s\n", - "2003200 Examples seen. Accuracy:0.9601 Error: 0.07813 Loss:0.05612 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.69s\n", - "2003840 Examples seen. Accuracy:0.9610 Error: 0.06347 Loss:0.04018 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.69s\n", - "2004480 Examples seen. Accuracy:0.9611 Error: 0.16046 Loss:0.12981 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.74s\n", - "2005120 Examples seen. Accuracy:0.9615 Error: 0.08533 Loss:0.06469 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.74s\n", - "2005760 Examples seen. Accuracy:0.9624 Error: 0.12167 Loss:0.13669 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.68s\n", - "2006400 Examples seen. Accuracy:0.9635 Error: 0.07194 Loss:0.04741 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.73s\n", - "2007040 Examples seen. Accuracy:0.9637 Error: 0.11450 Loss:0.10282 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.74s\n", - "2007680 Examples seen. Accuracy:0.9641 Error: 0.09521 Loss:0.14021 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.65s\n", - "2008320 Examples seen. Accuracy:0.9639 Error: 0.05709 Loss:0.04800 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.63s\n", - "2008960 Examples seen. Accuracy:0.9639 Error: 0.08175 Loss:0.06516 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.65s\n", - "2009600 Examples seen. 
Accuracy:0.9637 Error: 0.15362 Loss:0.18719 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.68s\n", - "2010240 Examples seen. Accuracy:0.9632 Error: 0.11108 Loss:0.08708 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.72s\n", - "2010880 Examples seen. Accuracy:0.9632 Error: 0.04366 Loss:0.06930 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.65s\n", - "2011520 Examples seen. Accuracy:0.9615 Error: 0.11517 Loss:0.13926 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.66s\n", - "2012160 Examples seen. Accuracy:0.9600 Error: 0.09046 Loss:0.10264 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.62s\n", - "2012800 Examples seen. Accuracy:0.9592 Error: 0.16845 Loss:0.19150 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.62s\n", - "2013440 Examples seen. Accuracy:0.9588 Error: 0.04481 Loss:0.03457 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.62s\n", - "2014080 Examples seen. Accuracy:0.9599 Error: 0.13315 Loss:0.09594 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.62s\n", - "2014720 Examples seen. Accuracy:0.9586 Error: 0.13077 Loss:0.10909 Threads: 8 Forward time: 5.07s Backward time: 3.26s Step time: 3.64s\n", - "2015360 Examples seen. Accuracy:0.9598 Error: 0.09163 Loss:0.07449 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.75s\n", - "2016000 Examples seen. Accuracy:0.9592 Error: 0.07351 Loss:0.06709 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.73s\n", - "2016640 Examples seen. Accuracy:0.9590 Error: 0.10505 Loss:0.08804 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.70s\n", - "2017280 Examples seen. Accuracy:0.9602 Error: 0.06601 Loss:0.04553 Threads: 8 Forward time: 5.23s Backward time: 3.37s Step time: 3.85s\n", - "2017920 Examples seen. 
Accuracy:0.9598 Error: 0.12744 Loss:0.14823 Threads: 8 Forward time: 5.10s Backward time: 3.23s Step time: 3.78s\n", - "2018560 Examples seen. Accuracy:0.9596 Error: 0.16724 Loss:0.20206 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.82s\n", - "2019200 Examples seen. Accuracy:0.9599 Error: 0.06914 Loss:0.07514 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.65s\n", - "2019840 Examples seen. Accuracy:0.9611 Error: 0.14347 Loss:0.16205 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.67s\n", - "2020480 Examples seen. Accuracy:0.9614 Error: 0.07310 Loss:0.06299 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.66s\n", - "2021120 Examples seen. Accuracy:0.9615 Error: 0.12056 Loss:0.09817 Threads: 8 Forward time: 5.04s Backward time: 3.22s Step time: 3.69s\n", - "2021760 Examples seen. Accuracy:0.9614 Error: 0.04295 Loss:0.02542 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.68s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2022400 Examples seen. Accuracy:0.9617 Error: 0.08384 Loss:0.15203 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.65s\n", - "2023040 Examples seen. Accuracy:0.9616 Error: 0.14660 Loss:0.15672 Threads: 8 Forward time: 4.97s Backward time: 3.19s Step time: 3.67s\n", - "2023680 Examples seen. Accuracy:0.9625 Error: 0.10610 Loss:0.15967 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.66s\n", - "2024320 Examples seen. Accuracy:0.9620 Error: 0.10287 Loss:0.11911 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.67s\n", - "2024960 Examples seen. Accuracy:0.9626 Error: 0.04699 Loss:0.02923 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.69s\n", - "2025600 Examples seen. Accuracy:0.9624 Error: 0.10360 Loss:0.11218 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.70s\n", - "2026240 Examples seen. 
Accuracy:0.9625 Error: 0.07975 Loss:0.05880 Threads: 8 Forward time: 4.99s Backward time: 3.19s Step time: 3.68s\n", - "2026880 Examples seen. Accuracy:0.9618 Error: 0.06190 Loss:0.03965 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.69s\n", - "2027520 Examples seen. Accuracy:0.9614 Error: 0.08094 Loss:0.05676 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.66s\n", - "2028160 Examples seen. Accuracy:0.9609 Error: 0.11550 Loss:0.11287 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.64s\n", - "2028800 Examples seen. Accuracy:0.9601 Error: 0.15215 Loss:0.22587 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.65s\n", - "2029440 Examples seen. Accuracy:0.9597 Error: 0.14517 Loss:0.12190 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.68s\n", - "2030080 Examples seen. Accuracy:0.9593 Error: 0.15409 Loss:0.10977 Threads: 8 Forward time: 4.95s Backward time: 3.16s Step time: 3.69s\n", - "2030720 Examples seen. Accuracy:0.9590 Error: 0.09567 Loss:0.11963 Threads: 8 Forward time: 4.96s Backward time: 3.17s Step time: 3.63s\n", - "2031360 Examples seen. Accuracy:0.9598 Error: 0.11987 Loss:0.08958 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.67s\n", - "2032000 Examples seen. Accuracy:0.9595 Error: 0.14072 Loss:0.22758 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.64s\n", - "2032640 Examples seen. Accuracy:0.9597 Error: 0.09160 Loss:0.07322 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.65s\n", - "2033280 Examples seen. Accuracy:0.9592 Error: 0.11376 Loss:0.09755 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.64s\n", - "2033920 Examples seen. Accuracy:0.9605 Error: 0.10646 Loss:0.16786 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.65s\n", - "2034560 Examples seen. 
Accuracy:0.9605 Error: 0.19598 Loss:0.17189 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.69s\n", - "2035200 Examples seen. Accuracy:0.9597 Error: 0.12555 Loss:0.14887 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.65s\n", - "2035840 Examples seen. Accuracy:0.9600 Error: 0.09475 Loss:0.07416 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.68s\n", - "2036480 Examples seen. Accuracy:0.9592 Error: 0.10563 Loss:0.10907 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.65s\n", - "2037120 Examples seen. Accuracy:0.9594 Error: 0.08218 Loss:0.05294 Threads: 8 Forward time: 5.06s Backward time: 3.21s Step time: 3.65s\n", - "2037760 Examples seen. Accuracy:0.9606 Error: 0.10311 Loss:0.11156 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.65s\n", - "2038400 Examples seen. Accuracy:0.9602 Error: 0.12782 Loss:0.10691 Threads: 8 Forward time: 4.93s Backward time: 3.19s Step time: 3.64s\n", - "2039040 Examples seen. Accuracy:0.9596 Error: 0.06344 Loss:0.04305 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.64s\n", - "2039680 Examples seen. Accuracy:0.9605 Error: 0.06420 Loss:0.03667 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.68s\n", - "2040320 Examples seen. Accuracy:0.9619 Error: 0.08777 Loss:0.07223 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.65s\n", - "2040960 Examples seen. Accuracy:0.9613 Error: 0.08049 Loss:0.08330 Threads: 8 Forward time: 5.05s Backward time: 3.27s Step time: 3.70s\n", - "2041600 Examples seen. Accuracy:0.9617 Error: 0.03486 Loss:0.02029 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.67s\n", - "2042240 Examples seen. Accuracy:0.9610 Error: 0.16104 Loss:0.19287 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.64s\n", - "2042880 Examples seen. 
Accuracy:0.9602 Error: 0.06730 Loss:0.05337 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.65s\n", - "2043520 Examples seen. Accuracy:0.9598 Error: 0.16222 Loss:0.17415 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.65s\n", - "2044160 Examples seen. Accuracy:0.9611 Error: 0.12666 Loss:0.16967 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.68s\n", - "2044800 Examples seen. Accuracy:0.9598 Error: 0.16521 Loss:0.15240 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.69s\n", - "2045440 Examples seen. Accuracy:0.9608 Error: 0.13204 Loss:0.15356 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.71s\n", - "Starting Validation.\n", - "Epochs: 41 Examples seen:2046064 Validation Accuracy: 0.9841 Validation Error: 0.0429 Validation Loss: 0.0413 Total time: 222.17min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 41. Working time: 3.7 hours.\n", - "2046704 Examples seen. Accuracy:0.9610 Error: 0.06388 Loss:0.04359 Threads: 8 Forward time: 5.06s Backward time: 3.28s Step time: 4.05s\n", - "2047344 Examples seen. Accuracy:0.9617 Error: 0.06283 Loss:0.05038 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.74s\n", - "2047984 Examples seen. Accuracy:0.9607 Error: 0.09704 Loss:0.09312 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.64s\n", - "2048624 Examples seen. Accuracy:0.9609 Error: 0.10587 Loss:0.07870 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.66s\n", - "2049264 Examples seen. Accuracy:0.9606 Error: 0.07583 Loss:0.06637 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.65s\n", - "2049904 Examples seen. Accuracy:0.9603 Error: 0.05833 Loss:0.04777 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.65s\n", - "2050544 Examples seen. Accuracy:0.9614 Error: 0.09262 Loss:0.07276 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.62s\n", - "2051184 Examples seen. 
Accuracy:0.9608 Error: 0.14598 Loss:0.14171 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "2051824 Examples seen. Accuracy:0.9611 Error: 0.07429 Loss:0.07359 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.64s\n", - "2052464 Examples seen. Accuracy:0.9614 Error: 0.05238 Loss:0.03611 Threads: 8 Forward time: 5.12s Backward time: 3.33s Step time: 3.67s\n", - "2053104 Examples seen. Accuracy:0.9606 Error: 0.12459 Loss:0.12483 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.68s\n", - "2053744 Examples seen. Accuracy:0.9610 Error: 0.06927 Loss:0.05665 Threads: 8 Forward time: 5.09s Backward time: 3.27s Step time: 3.70s\n", - "2054384 Examples seen. Accuracy:0.9605 Error: 0.16545 Loss:0.15736 Threads: 8 Forward time: 5.35s Backward time: 3.35s Step time: 4.03s\n", - "2055024 Examples seen. Accuracy:0.9603 Error: 0.08296 Loss:0.06288 Threads: 8 Forward time: 5.08s Backward time: 3.22s Step time: 3.80s\n", - "2055664 Examples seen. Accuracy:0.9608 Error: 0.12343 Loss:0.10977 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.73s\n", - "2056304 Examples seen. Accuracy:0.9615 Error: 0.05542 Loss:0.03797 Threads: 8 Forward time: 5.24s Backward time: 3.30s Step time: 3.74s\n", - "2056944 Examples seen. Accuracy:0.9621 Error: 0.12744 Loss:0.20415 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.76s\n", - "2057584 Examples seen. Accuracy:0.9617 Error: 0.13120 Loss:0.19197 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.72s\n", - "2058224 Examples seen. Accuracy:0.9623 Error: 0.08980 Loss:0.06216 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.75s\n", - "2058864 Examples seen. Accuracy:0.9612 Error: 0.06868 Loss:0.05070 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.76s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2059504 Examples seen. 
Accuracy:0.9610 Error: 0.01298 Loss:0.00672 Threads: 8 Forward time: 5.04s Backward time: 3.27s Step time: 3.79s\n", - "2060144 Examples seen. Accuracy:0.9611 Error: 0.07902 Loss:0.05557 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.77s\n", - "2060784 Examples seen. Accuracy:0.9606 Error: 0.12193 Loss:0.10071 Threads: 8 Forward time: 5.18s Backward time: 3.34s Step time: 3.77s\n", - "2061424 Examples seen. Accuracy:0.9592 Error: 0.09883 Loss:0.13209 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.71s\n", - "2062064 Examples seen. Accuracy:0.9602 Error: 0.07230 Loss:0.04877 Threads: 8 Forward time: 5.06s Backward time: 3.24s Step time: 3.70s\n", - "2062704 Examples seen. Accuracy:0.9609 Error: 0.12590 Loss:0.18081 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.75s\n", - "2063344 Examples seen. Accuracy:0.9612 Error: 0.11088 Loss:0.17661 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.70s\n", - "2063984 Examples seen. Accuracy:0.9609 Error: 0.04687 Loss:0.02792 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.67s\n", - "2064624 Examples seen. Accuracy:0.9607 Error: 0.11287 Loss:0.13627 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.74s\n", - "2065264 Examples seen. Accuracy:0.9607 Error: 0.03670 Loss:0.02258 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.67s\n", - "2065904 Examples seen. Accuracy:0.9599 Error: 0.13738 Loss:0.25287 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.64s\n", - "2066544 Examples seen. Accuracy:0.9607 Error: 0.16876 Loss:0.19209 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.64s\n", - "2067184 Examples seen. Accuracy:0.9597 Error: 0.13200 Loss:0.13405 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.69s\n", - "2067824 Examples seen. 
Accuracy:0.9604 Error: 0.09032 Loss:0.07258 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.67s\n", - "2068464 Examples seen. Accuracy:0.9610 Error: 0.08576 Loss:0.06849 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.67s\n", - "2069104 Examples seen. Accuracy:0.9608 Error: 0.17692 Loss:0.14961 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.64s\n", - "2069744 Examples seen. Accuracy:0.9602 Error: 0.12313 Loss:0.10311 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.70s\n", - "2070384 Examples seen. Accuracy:0.9601 Error: 0.11695 Loss:0.15260 Threads: 8 Forward time: 5.06s Backward time: 3.23s Step time: 3.70s\n", - "2071024 Examples seen. Accuracy:0.9614 Error: 0.14680 Loss:0.10326 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.65s\n", - "2071664 Examples seen. Accuracy:0.9619 Error: 0.07119 Loss:0.04497 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.64s\n", - "2072304 Examples seen. Accuracy:0.9616 Error: 0.08803 Loss:0.05972 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.63s\n", - "2072944 Examples seen. Accuracy:0.9613 Error: 0.09678 Loss:0.17543 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.66s\n", - "2073584 Examples seen. Accuracy:0.9613 Error: 0.07737 Loss:0.05816 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.70s\n", - "2074224 Examples seen. Accuracy:0.9614 Error: 0.12186 Loss:0.10699 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.70s\n", - "2074864 Examples seen. Accuracy:0.9619 Error: 0.16597 Loss:0.18705 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.67s\n", - "2075504 Examples seen. Accuracy:0.9621 Error: 0.15845 Loss:0.13995 Threads: 8 Forward time: 5.02s Backward time: 3.20s Step time: 3.63s\n", - "2076144 Examples seen. 
Accuracy:0.9610 Error: 0.04733 Loss:0.05108 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.67s\n", - "2076784 Examples seen. Accuracy:0.9599 Error: 0.14905 Loss:0.18632 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.64s\n", - "2077424 Examples seen. Accuracy:0.9604 Error: 0.14534 Loss:0.15695 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.63s\n", - "2078064 Examples seen. Accuracy:0.9620 Error: 0.06016 Loss:0.05733 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.64s\n", - "2078704 Examples seen. Accuracy:0.9624 Error: 0.08540 Loss:0.05417 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.64s\n", - "2079344 Examples seen. Accuracy:0.9617 Error: 0.18419 Loss:0.21919 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.65s\n", - "2079984 Examples seen. Accuracy:0.9606 Error: 0.11250 Loss:0.15092 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.65s\n", - "2080624 Examples seen. Accuracy:0.9590 Error: 0.09583 Loss:0.08303 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.65s\n", - "2081264 Examples seen. Accuracy:0.9598 Error: 0.07459 Loss:0.04854 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.69s\n", - "2081904 Examples seen. Accuracy:0.9592 Error: 0.09017 Loss:0.09750 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.64s\n", - "2082544 Examples seen. Accuracy:0.9598 Error: 0.04355 Loss:0.02540 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.63s\n", - "2083184 Examples seen. Accuracy:0.9609 Error: 0.05814 Loss:0.04025 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.63s\n", - "2083824 Examples seen. Accuracy:0.9606 Error: 0.03511 Loss:0.02003 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.68s\n", - "2084464 Examples seen. 
Accuracy:0.9595 Error: 0.09694 Loss:0.06281 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.66s\n", - "2085104 Examples seen. Accuracy:0.9592 Error: 0.17229 Loss:0.23004 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.66s\n", - "2085744 Examples seen. Accuracy:0.9602 Error: 0.11241 Loss:0.11904 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.64s\n", - "2086384 Examples seen. Accuracy:0.9602 Error: 0.05241 Loss:0.03136 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.65s\n", - "2087024 Examples seen. Accuracy:0.9607 Error: 0.09882 Loss:0.08587 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.65s\n", - "2087664 Examples seen. Accuracy:0.9599 Error: 0.04972 Loss:0.03560 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.66s\n", - "2088304 Examples seen. Accuracy:0.9598 Error: 0.12225 Loss:0.10739 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.71s\n", - "2088944 Examples seen. Accuracy:0.9598 Error: 0.09113 Loss:0.07337 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.69s\n", - "2089584 Examples seen. Accuracy:0.9601 Error: 0.12270 Loss:0.10179 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.70s\n", - "2090224 Examples seen. Accuracy:0.9600 Error: 0.05132 Loss:0.03167 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.71s\n", - "2090864 Examples seen. Accuracy:0.9591 Error: 0.08323 Loss:0.06303 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.73s\n", - "2091504 Examples seen. Accuracy:0.9585 Error: 0.10813 Loss:0.13794 Threads: 8 Forward time: 4.97s Backward time: 3.19s Step time: 3.68s\n", - "2092144 Examples seen. Accuracy:0.9590 Error: 0.09136 Loss:0.09140 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.67s\n", - "2092784 Examples seen. 
Accuracy:0.9586 Error: 0.09629 Loss:0.11328 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.69s\n", - "2093424 Examples seen. Accuracy:0.9581 Error: 0.12296 Loss:0.15214 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.69s\n", - "2094064 Examples seen. Accuracy:0.9571 Error: 0.12320 Loss:0.11565 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.71s\n", - "2094704 Examples seen. Accuracy:0.9574 Error: 0.10587 Loss:0.12594 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.72s\n", - "2095344 Examples seen. Accuracy:0.9571 Error: 0.10469 Loss:0.08153 Threads: 8 Forward time: 5.01s Backward time: 3.27s Step time: 3.69s\n", - "Starting Validation.\n", - "Epochs: 42 Examples seen:2095968 Validation Accuracy: 0.9837 Validation Error: 0.0430 Validation Loss: 0.0415 Total time: 227.45min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 42. Working time: 3.79 hours.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2096608 Examples seen. Accuracy:0.9573 Error: 0.11755 Loss:0.12870 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.74s\n", - "2097248 Examples seen. Accuracy:0.9575 Error: 0.11296 Loss:0.11223 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.67s\n", - "2097888 Examples seen. Accuracy:0.9582 Error: 0.07669 Loss:0.06412 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.66s\n", - "2098528 Examples seen. Accuracy:0.9583 Error: 0.09211 Loss:0.09909 Threads: 8 Forward time: 5.04s Backward time: 3.20s Step time: 3.66s\n", - "2099168 Examples seen. Accuracy:0.9576 Error: 0.06681 Loss:0.04023 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.65s\n", - "2099808 Examples seen. Accuracy:0.9586 Error: 0.05375 Loss:0.03276 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.69s\n", - "2100448 Examples seen. 
Accuracy:0.9588 Error: 0.14109 Loss:0.33881 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.61s\n", - "2101088 Examples seen. Accuracy:0.9596 Error: 0.15976 Loss:0.16914 Threads: 8 Forward time: 5.04s Backward time: 3.27s Step time: 3.65s\n", - "2101728 Examples seen. Accuracy:0.9611 Error: 0.08281 Loss:0.06811 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.64s\n", - "2102368 Examples seen. Accuracy:0.9614 Error: 0.10608 Loss:0.10758 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.65s\n", - "2103008 Examples seen. Accuracy:0.9616 Error: 0.05203 Loss:0.04243 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.63s\n", - "2103648 Examples seen. Accuracy:0.9609 Error: 0.14012 Loss:0.13503 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "2104288 Examples seen. Accuracy:0.9618 Error: 0.07116 Loss:0.06829 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.64s\n", - "2104928 Examples seen. Accuracy:0.9614 Error: 0.10297 Loss:0.07590 Threads: 8 Forward time: 4.89s Backward time: 3.25s Step time: 3.62s\n", - "2105568 Examples seen. Accuracy:0.9609 Error: 0.02785 Loss:0.01627 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.64s\n", - "2106208 Examples seen. Accuracy:0.9605 Error: 0.18635 Loss:0.20010 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.61s\n", - "2106848 Examples seen. Accuracy:0.9605 Error: 0.07878 Loss:0.07617 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.62s\n", - "2107488 Examples seen. Accuracy:0.9617 Error: 0.04060 Loss:0.03705 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.62s\n", - "2108128 Examples seen. Accuracy:0.9624 Error: 0.06737 Loss:0.11918 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.61s\n", - "2108768 Examples seen. 
Accuracy:0.9627 Error: 0.09881 Loss:0.06549 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.61s\n", - "2109408 Examples seen. Accuracy:0.9617 Error: 0.08038 Loss:0.06482 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.62s\n", - "2110048 Examples seen. Accuracy:0.9622 Error: 0.04157 Loss:0.02704 Threads: 8 Forward time: 5.08s Backward time: 3.26s Step time: 3.63s\n", - "2110688 Examples seen. Accuracy:0.9615 Error: 0.05347 Loss:0.03057 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.66s\n", - "2111328 Examples seen. Accuracy:0.9615 Error: 0.03442 Loss:0.07377 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.63s\n", - "2111968 Examples seen. Accuracy:0.9606 Error: 0.09026 Loss:0.06736 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.63s\n", - "2112608 Examples seen. Accuracy:0.9612 Error: 0.09052 Loss:0.21152 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.62s\n", - "2113248 Examples seen. Accuracy:0.9609 Error: 0.18786 Loss:0.20536 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.66s\n", - "2113888 Examples seen. Accuracy:0.9603 Error: 0.13745 Loss:0.17095 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.64s\n", - "2114528 Examples seen. Accuracy:0.9607 Error: 0.15790 Loss:0.20883 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.66s\n", - "2115168 Examples seen. Accuracy:0.9595 Error: 0.14595 Loss:0.11775 Threads: 8 Forward time: 4.93s Backward time: 3.19s Step time: 3.60s\n", - "2115808 Examples seen. Accuracy:0.9602 Error: 0.07886 Loss:0.07036 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.67s\n", - "2116448 Examples seen. Accuracy:0.9589 Error: 0.13103 Loss:0.11583 Threads: 8 Forward time: 5.15s Backward time: 3.29s Step time: 3.75s\n", - "2117088 Examples seen. 
Accuracy:0.9574 Error: 0.17012 Loss:0.18372 Threads: 8 Forward time: 5.05s Backward time: 3.24s Step time: 3.73s\n", - "2117728 Examples seen. Accuracy:0.9573 Error: 0.09321 Loss:0.17361 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.62s\n", - "2118368 Examples seen. Accuracy:0.9583 Error: 0.12010 Loss:0.10161 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.59s\n", - "2119008 Examples seen. Accuracy:0.9581 Error: 0.13168 Loss:0.27239 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.61s\n", - "2119648 Examples seen. Accuracy:0.9594 Error: 0.05126 Loss:0.02962 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.61s\n", - "2120288 Examples seen. Accuracy:0.9589 Error: 0.12949 Loss:0.14341 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.62s\n", - "2120928 Examples seen. Accuracy:0.9587 Error: 0.16659 Loss:0.12274 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.64s\n", - "2121568 Examples seen. Accuracy:0.9588 Error: 0.16076 Loss:0.22513 Threads: 8 Forward time: 5.01s Backward time: 3.19s Step time: 3.60s\n", - "2122208 Examples seen. Accuracy:0.9597 Error: 0.06150 Loss:0.03584 Threads: 8 Forward time: 5.02s Backward time: 3.20s Step time: 3.62s\n", - "2122848 Examples seen. Accuracy:0.9590 Error: 0.11596 Loss:0.16469 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.65s\n", - "2123488 Examples seen. Accuracy:0.9592 Error: 0.08563 Loss:0.11143 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.63s\n", - "2124128 Examples seen. Accuracy:0.9602 Error: 0.09218 Loss:0.07035 Threads: 8 Forward time: 4.93s Backward time: 3.20s Step time: 3.61s\n", - "2124768 Examples seen. Accuracy:0.9593 Error: 0.06447 Loss:0.04488 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.68s\n", - "2125408 Examples seen. 
Accuracy:0.9598 Error: 0.05466 Loss:0.10201 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.67s\n", - "2126048 Examples seen. Accuracy:0.9608 Error: 0.12386 Loss:0.17441 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.63s\n", - "2126688 Examples seen. Accuracy:0.9615 Error: 0.11161 Loss:0.11010 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.61s\n", - "2127328 Examples seen. Accuracy:0.9623 Error: 0.03152 Loss:0.01665 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.64s\n", - "2127968 Examples seen. Accuracy:0.9629 Error: 0.09988 Loss:0.06874 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.64s\n", - "2128608 Examples seen. Accuracy:0.9612 Error: 0.09593 Loss:0.06687 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.62s\n", - "2129248 Examples seen. Accuracy:0.9611 Error: 0.09015 Loss:0.08932 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.62s\n", - "2129888 Examples seen. Accuracy:0.9605 Error: 0.10368 Loss:0.11767 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.64s\n", - "2130528 Examples seen. Accuracy:0.9597 Error: 0.14479 Loss:0.23093 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.62s\n", - "2131168 Examples seen. Accuracy:0.9611 Error: 0.08661 Loss:0.10253 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.61s\n", - "2131808 Examples seen. Accuracy:0.9609 Error: 0.08236 Loss:0.12978 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.64s\n", - "2132448 Examples seen. Accuracy:0.9610 Error: 0.15831 Loss:0.14996 Threads: 8 Forward time: 4.92s Backward time: 3.20s Step time: 3.62s\n", - "2133088 Examples seen. Accuracy:0.9612 Error: 0.08443 Loss:0.07349 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.61s\n", - "2133728 Examples seen. 
Accuracy:0.9619 Error: 0.06208 Loss:0.04203 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.64s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2134368 Examples seen. Accuracy:0.9627 Error: 0.13095 Loss:0.13919 Threads: 8 Forward time: 5.35s Backward time: 3.40s Step time: 3.78s\n", - "2135008 Examples seen. Accuracy:0.9622 Error: 0.12744 Loss:0.11882 Threads: 8 Forward time: 5.07s Backward time: 3.25s Step time: 3.96s\n", - "2135648 Examples seen. Accuracy:0.9629 Error: 0.13301 Loss:0.19207 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.67s\n", - "2136288 Examples seen. Accuracy:0.9637 Error: 0.06881 Loss:0.08541 Threads: 8 Forward time: 5.14s Backward time: 3.27s Step time: 3.68s\n", - "2136928 Examples seen. Accuracy:0.9635 Error: 0.09436 Loss:0.08244 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.66s\n", - "2137568 Examples seen. Accuracy:0.9622 Error: 0.12418 Loss:0.11643 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.65s\n", - "2138208 Examples seen. Accuracy:0.9605 Error: 0.17934 Loss:0.17309 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.63s\n", - "2138848 Examples seen. Accuracy:0.9615 Error: 0.12980 Loss:0.12131 Threads: 8 Forward time: 5.05s Backward time: 3.28s Step time: 3.65s\n", - "2139488 Examples seen. Accuracy:0.9608 Error: 0.09891 Loss:0.08197 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.72s\n", - "2140128 Examples seen. Accuracy:0.9602 Error: 0.13142 Loss:0.13997 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.71s\n", - "2140768 Examples seen. Accuracy:0.9604 Error: 0.18337 Loss:0.22748 Threads: 8 Forward time: 5.08s Backward time: 3.24s Step time: 3.73s\n", - "2141408 Examples seen. Accuracy:0.9607 Error: 0.11484 Loss:0.10168 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.77s\n", - "2142048 Examples seen. 
Accuracy:0.9602 Error: 0.05541 Loss:0.06526 Threads: 8 Forward time: 5.06s Backward time: 3.29s Step time: 3.75s\n", - "2142688 Examples seen. Accuracy:0.9616 Error: 0.02845 Loss:0.01583 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.73s\n", - "2143328 Examples seen. Accuracy:0.9608 Error: 0.10429 Loss:0.10522 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.69s\n", - "2143968 Examples seen. Accuracy:0.9612 Error: 0.11446 Loss:0.24302 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.72s\n", - "2144608 Examples seen. Accuracy:0.9613 Error: 0.13311 Loss:0.12533 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.71s\n", - "2145248 Examples seen. Accuracy:0.9606 Error: 0.16251 Loss:0.17121 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.65s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 43 Examples seen:2145872 Validation Accuracy: 0.9855 Validation Error: 0.0417 Validation Loss: 0.0411 Total time: 232.68min\n", - "Epoch time: 4.8 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 43. Working time: 3.88 hours.\n", - "2146512 Examples seen. Accuracy:0.9604 Error: 0.11870 Loss:0.09435 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.82s\n", - "2147152 Examples seen. Accuracy:0.9606 Error: 0.06837 Loss:0.06857 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.70s\n", - "2147792 Examples seen. Accuracy:0.9609 Error: 0.08509 Loss:0.09438 Threads: 8 Forward time: 5.07s Backward time: 3.21s Step time: 3.72s\n", - "2148432 Examples seen. Accuracy:0.9607 Error: 0.13966 Loss:0.12524 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.68s\n", - "2149072 Examples seen. Accuracy:0.9597 Error: 0.11357 Loss:0.11758 Threads: 8 Forward time: 5.07s Backward time: 3.20s Step time: 3.68s\n", - "2149712 Examples seen. 
Accuracy:0.9584 Error: 0.11924 Loss:0.08772 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.67s\n", - "2150352 Examples seen. Accuracy:0.9587 Error: 0.03487 Loss:0.02216 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.64s\n", - "2150992 Examples seen. Accuracy:0.9595 Error: 0.06457 Loss:0.06064 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.66s\n", - "2151632 Examples seen. Accuracy:0.9596 Error: 0.07961 Loss:0.06442 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.68s\n", - "2152272 Examples seen. Accuracy:0.9595 Error: 0.03298 Loss:0.02337 Threads: 8 Forward time: 4.92s Backward time: 3.18s Step time: 3.65s\n", - "2152912 Examples seen. Accuracy:0.9590 Error: 0.10797 Loss:0.07846 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.65s\n", - "2153552 Examples seen. Accuracy:0.9600 Error: 0.08992 Loss:0.08442 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.66s\n", - "2154192 Examples seen. Accuracy:0.9595 Error: 0.12749 Loss:0.11727 Threads: 8 Forward time: 5.05s Backward time: 3.24s Step time: 3.65s\n", - "2154832 Examples seen. Accuracy:0.9590 Error: 0.12088 Loss:0.12940 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.67s\n", - "2155472 Examples seen. Accuracy:0.9610 Error: 0.03941 Loss:0.02266 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.66s\n", - "2156112 Examples seen. Accuracy:0.9614 Error: 0.09451 Loss:0.06711 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.67s\n", - "2156752 Examples seen. Accuracy:0.9618 Error: 0.09939 Loss:0.17680 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.67s\n", - "2157392 Examples seen. Accuracy:0.9611 Error: 0.08817 Loss:0.06361 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.65s\n", - "2158032 Examples seen. 
Accuracy:0.9606 Error: 0.21197 Loss:0.32624 Threads: 8 Forward time: 5.04s Backward time: 3.20s Step time: 3.70s\n", - "2158672 Examples seen. Accuracy:0.9612 Error: 0.08602 Loss:0.08977 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.63s\n", - "2159312 Examples seen. Accuracy:0.9620 Error: 0.06605 Loss:0.04316 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.64s\n", - "2159952 Examples seen. Accuracy:0.9640 Error: 0.13262 Loss:0.26982 Threads: 8 Forward time: 4.94s Backward time: 3.19s Step time: 3.64s\n", - "2160592 Examples seen. Accuracy:0.9651 Error: 0.03522 Loss:0.02577 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.65s\n", - "2161232 Examples seen. Accuracy:0.9660 Error: 0.02488 Loss:0.01370 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.66s\n", - "2161872 Examples seen. Accuracy:0.9670 Error: 0.12088 Loss:0.10219 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.65s\n", - "2162512 Examples seen. Accuracy:0.9673 Error: 0.08449 Loss:0.05989 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.65s\n", - "2163152 Examples seen. Accuracy:0.9676 Error: 0.04382 Loss:0.02866 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.64s\n", - "2163792 Examples seen. Accuracy:0.9678 Error: 0.05929 Loss:0.05157 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.65s\n", - "2164432 Examples seen. Accuracy:0.9678 Error: 0.08612 Loss:0.05610 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.64s\n", - "2165072 Examples seen. Accuracy:0.9687 Error: 0.13214 Loss:0.13166 Threads: 8 Forward time: 4.94s Backward time: 3.20s Step time: 3.64s\n", - "2165712 Examples seen. Accuracy:0.9685 Error: 0.09036 Loss:0.07031 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.66s\n", - "2166352 Examples seen. 
Accuracy:0.9684 Error: 0.09283 Loss:0.09278 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.66s\n", - "2166992 Examples seen. Accuracy:0.9673 Error: 0.14268 Loss:0.13038 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.65s\n", - "2167632 Examples seen. Accuracy:0.9659 Error: 0.05910 Loss:0.06256 Threads: 8 Forward time: 4.94s Backward time: 3.19s Step time: 3.64s\n", - "2168272 Examples seen. Accuracy:0.9663 Error: 0.07668 Loss:0.06164 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.65s\n", - "2168912 Examples seen. Accuracy:0.9658 Error: 0.07944 Loss:0.08093 Threads: 8 Forward time: 4.96s Backward time: 3.17s Step time: 3.63s\n", - "2169552 Examples seen. Accuracy:0.9655 Error: 0.13784 Loss:0.13705 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.65s\n", - "2170192 Examples seen. Accuracy:0.9648 Error: 0.06838 Loss:0.04370 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.67s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2170832 Examples seen. Accuracy:0.9652 Error: 0.04912 Loss:0.03298 Threads: 8 Forward time: 5.04s Backward time: 3.21s Step time: 3.67s\n", - "2171472 Examples seen. Accuracy:0.9651 Error: 0.06095 Loss:0.05595 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.67s\n", - "2172112 Examples seen. Accuracy:0.9649 Error: 0.20185 Loss:0.23184 Threads: 8 Forward time: 5.21s Backward time: 3.32s Step time: 3.72s\n", - "2172752 Examples seen. Accuracy:0.9654 Error: 0.06384 Loss:0.06430 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.72s\n", - "2173392 Examples seen. Accuracy:0.9647 Error: 0.05035 Loss:0.03945 Threads: 8 Forward time: 5.05s Backward time: 3.20s Step time: 3.71s\n", - "2174032 Examples seen. Accuracy:0.9645 Error: 0.12800 Loss:0.11676 Threads: 8 Forward time: 5.09s Backward time: 3.27s Step time: 3.68s\n", - "2174672 Examples seen. 
Accuracy:0.9642 Error: 0.17388 Loss:0.52305 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.68s\n", - "2175312 Examples seen. Accuracy:0.9645 Error: 0.09176 Loss:0.07710 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.66s\n", - "2175952 Examples seen. Accuracy:0.9638 Error: 0.16468 Loss:0.15757 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.64s\n", - "2176592 Examples seen. Accuracy:0.9636 Error: 0.02155 Loss:0.01151 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.64s\n", - "2177232 Examples seen. Accuracy:0.9635 Error: 0.05643 Loss:0.05546 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "2177872 Examples seen. Accuracy:0.9643 Error: 0.05742 Loss:0.03892 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.63s\n", - "2178512 Examples seen. Accuracy:0.9633 Error: 0.07291 Loss:0.08091 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.70s\n", - "2179152 Examples seen. Accuracy:0.9626 Error: 0.10645 Loss:0.10482 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.70s\n", - "2179792 Examples seen. Accuracy:0.9628 Error: 0.07571 Loss:0.07119 Threads: 8 Forward time: 5.09s Backward time: 3.21s Step time: 3.69s\n", - "2180432 Examples seen. Accuracy:0.9630 Error: 0.03258 Loss:0.02193 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.65s\n", - "2181072 Examples seen. Accuracy:0.9639 Error: 0.04135 Loss:0.02377 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.62s\n", - "2181712 Examples seen. Accuracy:0.9635 Error: 0.05194 Loss:0.05061 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.63s\n", - "2182352 Examples seen. Accuracy:0.9630 Error: 0.14752 Loss:0.14869 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.62s\n", - "2182992 Examples seen. 
Accuracy:0.9623 Error: 0.08928 Loss:0.13221 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.62s\n", - "2183632 Examples seen. Accuracy:0.9635 Error: 0.06841 Loss:0.04940 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.63s\n", - "2184272 Examples seen. Accuracy:0.9635 Error: 0.05628 Loss:0.06051 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.63s\n", - "2184912 Examples seen. Accuracy:0.9631 Error: 0.13624 Loss:0.17239 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.62s\n", - "2185552 Examples seen. Accuracy:0.9625 Error: 0.10047 Loss:0.09523 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.65s\n", - "2186192 Examples seen. Accuracy:0.9625 Error: 0.07725 Loss:0.10182 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.60s\n", - "2186832 Examples seen. Accuracy:0.9635 Error: 0.05553 Loss:0.03412 Threads: 8 Forward time: 5.02s Backward time: 3.19s Step time: 3.62s\n", - "2187472 Examples seen. Accuracy:0.9632 Error: 0.09951 Loss:0.14583 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.60s\n", - "2188112 Examples seen. Accuracy:0.9618 Error: 0.17636 Loss:0.19233 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.62s\n", - "2188752 Examples seen. Accuracy:0.9624 Error: 0.06237 Loss:0.07669 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.63s\n", - "2189392 Examples seen. Accuracy:0.9620 Error: 0.07951 Loss:0.17669 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.62s\n", - "2190032 Examples seen. Accuracy:0.9608 Error: 0.07631 Loss:0.05460 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.65s\n", - "2190672 Examples seen. Accuracy:0.9613 Error: 0.04729 Loss:0.03467 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.62s\n", - "2191312 Examples seen. 
Accuracy:0.9616 Error: 0.14495 Loss:0.15853 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.63s\n", - "2191952 Examples seen. Accuracy:0.9606 Error: 0.09179 Loss:0.06952 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.65s\n", - "2192592 Examples seen. Accuracy:0.9620 Error: 0.07045 Loss:0.04203 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.65s\n", - "2193232 Examples seen. Accuracy:0.9631 Error: 0.12244 Loss:0.19063 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.64s\n", - "2193872 Examples seen. Accuracy:0.9637 Error: 0.05471 Loss:0.04694 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.64s\n", - "2194512 Examples seen. Accuracy:0.9645 Error: 0.06500 Loss:0.05480 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.65s\n", - "2195152 Examples seen. Accuracy:0.9646 Error: 0.11435 Loss:0.16702 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.63s\n", - "Starting Validation.\n", - "Epochs: 44 Examples seen:2195776 Validation Accuracy: 0.9848 Validation Error: 0.0415 Validation Loss: 0.0413 Total time: 237.90min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 44. Working time: 3.96 hours.\n", - "2196416 Examples seen. Accuracy:0.9638 Error: 0.06707 Loss:0.05455 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.70s\n", - "2197056 Examples seen. Accuracy:0.9625 Error: 0.13326 Loss:0.13287 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.63s\n", - "2197696 Examples seen. Accuracy:0.9623 Error: 0.05389 Loss:0.03639 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.65s\n", - "2198336 Examples seen. Accuracy:0.9620 Error: 0.13830 Loss:0.13066 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.65s\n", - "2198976 Examples seen. Accuracy:0.9610 Error: 0.13539 Loss:0.21107 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.64s\n", - "2199616 Examples seen. 
Accuracy:0.9604 Error: 0.08878 Loss:0.06378 Threads: 8 Forward time: 4.99s Backward time: 3.17s Step time: 3.65s\n", - "2200256 Examples seen. Accuracy:0.9606 Error: 0.06904 Loss:0.06060 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "2200896 Examples seen. Accuracy:0.9607 Error: 0.05256 Loss:0.03415 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.67s\n", - "2201536 Examples seen. Accuracy:0.9603 Error: 0.14263 Loss:0.17076 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.68s\n", - "2202176 Examples seen. Accuracy:0.9598 Error: 0.05747 Loss:0.03314 Threads: 8 Forward time: 5.04s Backward time: 3.22s Step time: 3.72s\n", - "2202816 Examples seen. Accuracy:0.9598 Error: 0.03687 Loss:0.01958 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.65s\n", - "2203456 Examples seen. Accuracy:0.9598 Error: 0.16185 Loss:0.14748 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.66s\n", - "2204096 Examples seen. Accuracy:0.9583 Error: 0.12543 Loss:0.12296 Threads: 8 Forward time: 4.95s Backward time: 3.18s Step time: 3.69s\n", - "2204736 Examples seen. Accuracy:0.9574 Error: 0.12225 Loss:0.08578 Threads: 8 Forward time: 4.99s Backward time: 3.14s Step time: 3.62s\n", - "2205376 Examples seen. Accuracy:0.9581 Error: 0.08237 Loss:0.08074 Threads: 8 Forward time: 4.98s Backward time: 3.15s Step time: 3.63s\n", - "2206016 Examples seen. Accuracy:0.9584 Error: 0.06354 Loss:0.04355 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.66s\n", - "2206656 Examples seen. Accuracy:0.9578 Error: 0.10062 Loss:0.10562 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.69s\n", - "2207296 Examples seen. Accuracy:0.9581 Error: 0.11726 Loss:0.12055 Threads: 8 Forward time: 5.01s Backward time: 3.18s Step time: 3.68s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2207936 Examples seen. 
Accuracy:0.9583 Error: 0.09920 Loss:0.07449 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.67s\n", - "2208576 Examples seen. Accuracy:0.9588 Error: 0.12463 Loss:0.13638 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.66s\n", - "2209216 Examples seen. Accuracy:0.9601 Error: 0.06230 Loss:0.04044 Threads: 8 Forward time: 5.09s Backward time: 3.23s Step time: 3.71s\n", - "2209856 Examples seen. Accuracy:0.9608 Error: 0.07249 Loss:0.05280 Threads: 8 Forward time: 5.09s Backward time: 3.24s Step time: 3.66s\n", - "2210496 Examples seen. Accuracy:0.9610 Error: 0.10962 Loss:0.09056 Threads: 8 Forward time: 5.10s Backward time: 3.24s Step time: 3.68s\n", - "2211136 Examples seen. Accuracy:0.9606 Error: 0.08151 Loss:0.05317 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.79s\n", - "2211776 Examples seen. Accuracy:0.9597 Error: 0.04749 Loss:0.04379 Threads: 8 Forward time: 4.94s Backward time: 3.18s Step time: 3.61s\n", - "2212416 Examples seen. Accuracy:0.9596 Error: 0.09546 Loss:0.20199 Threads: 8 Forward time: 5.09s Backward time: 3.22s Step time: 3.62s\n", - "2213056 Examples seen. Accuracy:0.9584 Error: 0.04853 Loss:0.03590 Threads: 8 Forward time: 4.97s Backward time: 3.18s Step time: 3.66s\n", - "2213696 Examples seen. Accuracy:0.9585 Error: 0.17711 Loss:0.21431 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.62s\n", - "2214336 Examples seen. Accuracy:0.9599 Error: 0.07186 Loss:0.07902 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.65s\n", - "2214976 Examples seen. Accuracy:0.9612 Error: 0.04215 Loss:0.02723 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.64s\n", - "2215616 Examples seen. Accuracy:0.9603 Error: 0.04699 Loss:0.04298 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.62s\n", - "2216256 Examples seen. 
Accuracy:0.9596 Error: 0.11184 Loss:0.11202 Threads: 8 Forward time: 5.04s Backward time: 3.20s Step time: 3.74s\n", - "2216896 Examples seen. Accuracy:0.9597 Error: 0.04524 Loss:0.03202 Threads: 8 Forward time: 5.15s Backward time: 3.25s Step time: 3.66s\n", - "2217536 Examples seen. Accuracy:0.9594 Error: 0.13752 Loss:0.11988 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.64s\n", - "2218176 Examples seen. Accuracy:0.9602 Error: 0.09819 Loss:0.07648 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.65s\n", - "2218816 Examples seen. Accuracy:0.9591 Error: 0.13943 Loss:0.11528 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.69s\n", - "2219456 Examples seen. Accuracy:0.9586 Error: 0.12464 Loss:0.10484 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.65s\n", - "2220096 Examples seen. Accuracy:0.9590 Error: 0.11554 Loss:0.16358 Threads: 8 Forward time: 4.97s Backward time: 3.19s Step time: 3.64s\n", - "2220736 Examples seen. Accuracy:0.9592 Error: 0.08293 Loss:0.06599 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.68s\n", - "2221376 Examples seen. Accuracy:0.9588 Error: 0.13179 Loss:0.12383 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.62s\n", - "2222016 Examples seen. Accuracy:0.9598 Error: 0.08749 Loss:0.09880 Threads: 8 Forward time: 5.06s Backward time: 3.21s Step time: 3.64s\n", - "2222656 Examples seen. Accuracy:0.9603 Error: 0.08564 Loss:0.09390 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.65s\n", - "2223296 Examples seen. Accuracy:0.9596 Error: 0.12061 Loss:0.10053 Threads: 8 Forward time: 5.06s Backward time: 3.30s Step time: 3.73s\n", - "2223936 Examples seen. Accuracy:0.9595 Error: 0.10722 Loss:0.09209 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.69s\n", - "2224576 Examples seen. 
Accuracy:0.9597 Error: 0.05098 Loss:0.02852 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.65s\n", - "2225216 Examples seen. Accuracy:0.9600 Error: 0.09565 Loss:0.06985 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.67s\n", - "2225856 Examples seen. Accuracy:0.9602 Error: 0.09653 Loss:0.12077 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.64s\n", - "2226496 Examples seen. Accuracy:0.9596 Error: 0.15849 Loss:0.15500 Threads: 8 Forward time: 4.98s Backward time: 3.18s Step time: 3.67s\n", - "2227136 Examples seen. Accuracy:0.9609 Error: 0.05617 Loss:0.03544 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "2227776 Examples seen. Accuracy:0.9613 Error: 0.05057 Loss:0.03336 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.65s\n", - "2228416 Examples seen. Accuracy:0.9612 Error: 0.05656 Loss:0.03637 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.65s\n", - "2229056 Examples seen. Accuracy:0.9616 Error: 0.11143 Loss:0.09354 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "2229696 Examples seen. Accuracy:0.9625 Error: 0.04199 Loss:0.03019 Threads: 8 Forward time: 4.92s Backward time: 3.19s Step time: 3.63s\n", - "2230336 Examples seen. Accuracy:0.9620 Error: 0.13581 Loss:0.14776 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.64s\n", - "2230976 Examples seen. Accuracy:0.9626 Error: 0.05760 Loss:0.03630 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.64s\n", - "2231616 Examples seen. Accuracy:0.9623 Error: 0.01874 Loss:0.00998 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.64s\n", - "2232256 Examples seen. Accuracy:0.9619 Error: 0.10529 Loss:0.12723 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.66s\n", - "2232896 Examples seen. 
Accuracy:0.9610 Error: 0.13822 Loss:0.15331 Threads: 8 Forward time: 5.03s Backward time: 3.21s Step time: 3.68s\n", - "2233536 Examples seen. Accuracy:0.9607 Error: 0.21609 Loss:0.19170 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.66s\n", - "2234176 Examples seen. Accuracy:0.9610 Error: 0.10512 Loss:0.08462 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.72s\n", - "2234816 Examples seen. Accuracy:0.9624 Error: 0.11837 Loss:0.13793 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.68s\n", - "2235456 Examples seen. Accuracy:0.9635 Error: 0.05210 Loss:0.03221 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.68s\n", - "2236096 Examples seen. Accuracy:0.9647 Error: 0.10461 Loss:0.07700 Threads: 8 Forward time: 4.90s Backward time: 3.23s Step time: 3.65s\n", - "2236736 Examples seen. Accuracy:0.9634 Error: 0.10867 Loss:0.10829 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.62s\n", - "2237376 Examples seen. Accuracy:0.9640 Error: 0.04601 Loss:0.05502 Threads: 8 Forward time: 5.12s Backward time: 3.28s Step time: 3.69s\n", - "2238016 Examples seen. Accuracy:0.9639 Error: 0.16007 Loss:0.15163 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.65s\n", - "2238656 Examples seen. Accuracy:0.9632 Error: 0.16099 Loss:0.15999 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.66s\n", - "2239296 Examples seen. Accuracy:0.9636 Error: 0.04550 Loss:0.02949 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.65s\n", - "2239936 Examples seen. Accuracy:0.9625 Error: 0.06603 Loss:0.04459 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.65s\n", - "2240576 Examples seen. Accuracy:0.9625 Error: 0.11153 Loss:0.12012 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.65s\n", - "2241216 Examples seen. 
Accuracy:0.9611 Error: 0.15451 Loss:0.14785 Threads: 8 Forward time: 4.93s Backward time: 3.18s Step time: 3.64s\n", - "2241856 Examples seen. Accuracy:0.9610 Error: 0.14070 Loss:0.14935 Threads: 8 Forward time: 4.97s Backward time: 3.18s Step time: 3.64s\n", - "2242496 Examples seen. Accuracy:0.9615 Error: 0.04407 Loss:0.02899 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.62s\n", - "2243136 Examples seen. Accuracy:0.9617 Error: 0.11429 Loss:0.08094 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "2243776 Examples seen. Accuracy:0.9622 Error: 0.04540 Loss:0.02638 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.66s\n", - "2244416 Examples seen. Accuracy:0.9615 Error: 0.10046 Loss:0.09142 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.76s\n", - "2245056 Examples seen. Accuracy:0.9605 Error: 0.09301 Loss:0.07046 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.69s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Starting Validation.\n", - "Epochs: 45 Examples seen:2245680 Validation Accuracy: 0.9844 Validation Error: 0.0412 Validation Loss: 0.0409 Total time: 243.13min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 45. Working time: 4.05 hours.\n", - "2246320 Examples seen. Accuracy:0.9590 Error: 0.13287 Loss:0.14000 Threads: 8 Forward time: 5.08s Backward time: 3.27s Step time: 3.77s\n", - "2246960 Examples seen. Accuracy:0.9602 Error: 0.07393 Loss:0.04641 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.70s\n", - "2247600 Examples seen. Accuracy:0.9611 Error: 0.04645 Loss:0.02693 Threads: 8 Forward time: 4.98s Backward time: 3.19s Step time: 3.70s\n", - "2248240 Examples seen. Accuracy:0.9608 Error: 0.06798 Loss:0.04907 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.65s\n", - "2248880 Examples seen. 
Accuracy:0.9604 Error: 0.09797 Loss:0.16991 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.66s\n", - "2249520 Examples seen. Accuracy:0.9612 Error: 0.08461 Loss:0.05634 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.66s\n", - "2250160 Examples seen. Accuracy:0.9604 Error: 0.17183 Loss:0.18872 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.66s\n", - "2250800 Examples seen. Accuracy:0.9612 Error: 0.07799 Loss:0.12348 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.65s\n", - "2251440 Examples seen. Accuracy:0.9603 Error: 0.09607 Loss:0.08760 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "2252080 Examples seen. Accuracy:0.9614 Error: 0.13596 Loss:0.08938 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.72s\n", - "2252720 Examples seen. Accuracy:0.9616 Error: 0.09007 Loss:0.05811 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.79s\n", - "2253360 Examples seen. Accuracy:0.9614 Error: 0.03817 Loss:0.02145 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.68s\n", - "2254000 Examples seen. Accuracy:0.9610 Error: 0.10563 Loss:0.10389 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.62s\n", - "2254640 Examples seen. Accuracy:0.9614 Error: 0.00998 Loss:0.00513 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.61s\n", - "2255280 Examples seen. Accuracy:0.9616 Error: 0.05746 Loss:0.03908 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.62s\n", - "2255920 Examples seen. Accuracy:0.9611 Error: 0.10721 Loss:0.14647 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.64s\n", - "2256560 Examples seen. Accuracy:0.9614 Error: 0.18735 Loss:0.14859 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.62s\n", - "2257200 Examples seen. 
Accuracy:0.9621 Error: 0.09981 Loss:0.07409 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.61s\n", - "2257840 Examples seen. Accuracy:0.9615 Error: 0.11865 Loss:0.12271 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.61s\n", - "2258480 Examples seen. Accuracy:0.9618 Error: 0.13714 Loss:0.11136 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.62s\n", - "2259120 Examples seen. Accuracy:0.9618 Error: 0.11446 Loss:0.13939 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.61s\n", - "2259760 Examples seen. Accuracy:0.9625 Error: 0.09157 Loss:0.06055 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.63s\n", - "2260400 Examples seen. Accuracy:0.9630 Error: 0.07051 Loss:0.06171 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.61s\n", - "2261040 Examples seen. Accuracy:0.9620 Error: 0.09042 Loss:0.10776 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.61s\n", - "2261680 Examples seen. Accuracy:0.9615 Error: 0.10618 Loss:0.11881 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.62s\n", - "2262320 Examples seen. Accuracy:0.9626 Error: 0.04730 Loss:0.02709 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.64s\n", - "2262960 Examples seen. Accuracy:0.9648 Error: 0.09662 Loss:0.06404 Threads: 8 Forward time: 5.02s Backward time: 3.20s Step time: 3.64s\n", - "2263600 Examples seen. Accuracy:0.9634 Error: 0.13704 Loss:0.12595 Threads: 8 Forward time: 5.11s Backward time: 3.24s Step time: 3.65s\n", - "2264240 Examples seen. Accuracy:0.9625 Error: 0.08465 Loss:0.06717 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.64s\n", - "2264880 Examples seen. Accuracy:0.9625 Error: 0.05490 Loss:0.03704 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.63s\n", - "2265520 Examples seen. 
Accuracy:0.9623 Error: 0.06769 Loss:0.10501 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.61s\n", - "2266160 Examples seen. Accuracy:0.9620 Error: 0.08800 Loss:0.07231 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.62s\n", - "2266800 Examples seen. Accuracy:0.9612 Error: 0.11540 Loss:0.17783 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.61s\n", - "2267440 Examples seen. Accuracy:0.9604 Error: 0.09549 Loss:0.07448 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.63s\n", - "2268080 Examples seen. Accuracy:0.9603 Error: 0.15698 Loss:0.17653 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.64s\n", - "2268720 Examples seen. Accuracy:0.9612 Error: 0.08598 Loss:0.08932 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.63s\n", - "2269360 Examples seen. Accuracy:0.9618 Error: 0.09553 Loss:0.07079 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.61s\n", - "2270000 Examples seen. Accuracy:0.9618 Error: 0.12322 Loss:0.16417 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.64s\n", - "2270640 Examples seen. Accuracy:0.9614 Error: 0.13161 Loss:0.11291 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.64s\n", - "2271280 Examples seen. Accuracy:0.9617 Error: 0.03687 Loss:0.02291 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.62s\n", - "2271920 Examples seen. Accuracy:0.9614 Error: 0.04881 Loss:0.03459 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.62s\n", - "2272560 Examples seen. Accuracy:0.9620 Error: 0.07914 Loss:0.13548 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.64s\n", - "2273200 Examples seen. Accuracy:0.9619 Error: 0.10582 Loss:0.17881 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.64s\n", - "2273840 Examples seen. 
Accuracy:0.9622 Error: 0.02611 Loss:0.01410 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.62s\n", - "2274480 Examples seen. Accuracy:0.9611 Error: 0.16059 Loss:0.21528 Threads: 8 Forward time: 4.97s Backward time: 3.18s Step time: 3.67s\n", - "2275120 Examples seen. Accuracy:0.9603 Error: 0.18593 Loss:0.23348 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.66s\n", - "2275760 Examples seen. Accuracy:0.9610 Error: 0.07312 Loss:0.04423 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.64s\n", - "2276400 Examples seen. Accuracy:0.9608 Error: 0.10570 Loss:0.08029 Threads: 8 Forward time: 4.94s Backward time: 3.20s Step time: 3.65s\n", - "2277040 Examples seen. Accuracy:0.9621 Error: 0.03549 Loss:0.02158 Threads: 8 Forward time: 4.93s Backward time: 3.20s Step time: 3.63s\n", - "2277680 Examples seen. Accuracy:0.9622 Error: 0.14673 Loss:0.25365 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.64s\n", - "2278320 Examples seen. Accuracy:0.9616 Error: 0.08798 Loss:0.05844 Threads: 8 Forward time: 4.91s Backward time: 3.21s Step time: 3.64s\n", - "2278960 Examples seen. Accuracy:0.9619 Error: 0.04089 Loss:0.02319 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.67s\n", - "2279600 Examples seen. Accuracy:0.9607 Error: 0.16720 Loss:0.15693 Threads: 8 Forward time: 4.94s Backward time: 3.20s Step time: 3.65s\n", - "2280240 Examples seen. Accuracy:0.9616 Error: 0.16471 Loss:0.18166 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.68s\n", - "2280880 Examples seen. Accuracy:0.9616 Error: 0.10903 Loss:0.09177 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.68s\n", - "2281520 Examples seen. Accuracy:0.9626 Error: 0.07130 Loss:0.04390 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.64s\n", - "2282160 Examples seen. 
Accuracy:0.9627 Error: 0.12406 Loss:0.10357 Threads: 8 Forward time: 5.37s Backward time: 3.42s Step time: 3.82s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2282800 Examples seen. Accuracy:0.9639 Error: 0.04753 Loss:0.03004 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.82s\n", - "2283440 Examples seen. Accuracy:0.9631 Error: 0.04131 Loss:0.02814 Threads: 8 Forward time: 5.07s Backward time: 3.25s Step time: 3.68s\n", - "2284080 Examples seen. Accuracy:0.9627 Error: 0.08728 Loss:0.06733 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.68s\n", - "2284720 Examples seen. Accuracy:0.9627 Error: 0.04914 Loss:0.08239 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.70s\n", - "2285360 Examples seen. Accuracy:0.9628 Error: 0.13192 Loss:0.11839 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.77s\n", - "2286000 Examples seen. Accuracy:0.9634 Error: 0.12382 Loss:0.08834 Threads: 8 Forward time: 4.93s Backward time: 3.21s Step time: 3.75s\n", - "2286640 Examples seen. Accuracy:0.9637 Error: 0.06468 Loss:0.04005 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.67s\n", - "2287280 Examples seen. Accuracy:0.9635 Error: 0.07094 Loss:0.09748 Threads: 8 Forward time: 5.01s Backward time: 3.29s Step time: 3.73s\n", - "2287920 Examples seen. Accuracy:0.9634 Error: 0.08314 Loss:0.11998 Threads: 8 Forward time: 5.09s Backward time: 3.30s Step time: 3.82s\n", - "2288560 Examples seen. Accuracy:0.9638 Error: 0.03340 Loss:0.02209 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.75s\n", - "2289200 Examples seen. Accuracy:0.9635 Error: 0.03898 Loss:0.02286 Threads: 8 Forward time: 5.07s Backward time: 3.31s Step time: 3.77s\n", - "2289840 Examples seen. Accuracy:0.9625 Error: 0.08315 Loss:0.08321 Threads: 8 Forward time: 5.35s Backward time: 3.39s Step time: 4.02s\n", - "2290480 Examples seen. 
Accuracy:0.9626 Error: 0.03112 Loss:0.02013 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.87s\n", - "2291120 Examples seen. Accuracy:0.9629 Error: 0.05307 Loss:0.04526 Threads: 8 Forward time: 5.09s Backward time: 3.29s Step time: 3.68s\n", - "2291760 Examples seen. Accuracy:0.9628 Error: 0.09894 Loss:0.07864 Threads: 8 Forward time: 5.13s Backward time: 3.30s Step time: 3.95s\n", - "2292400 Examples seen. Accuracy:0.9637 Error: 0.07239 Loss:0.04569 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 3.78s\n", - "2293040 Examples seen. Accuracy:0.9638 Error: 0.11483 Loss:0.09025 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.68s\n", - "2293680 Examples seen. Accuracy:0.9633 Error: 0.10536 Loss:0.09038 Threads: 8 Forward time: 5.13s Backward time: 3.31s Step time: 3.70s\n", - "2294320 Examples seen. Accuracy:0.9635 Error: 0.07328 Loss:0.06695 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.70s\n", - "2294960 Examples seen. Accuracy:0.9646 Error: 0.03526 Loss:0.02102 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.72s\n", - "Starting Validation.\n", - "Epochs: 46 Examples seen:2295584 Validation Accuracy: 0.9851 Validation Error: 0.0408 Validation Loss: 0.0409 Total time: 248.38min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8.1 hours.\n", - "Epochs: 46. Working time: 4.14 hours.\n", - "2296224 Examples seen. Accuracy:0.9659 Error: 0.06586 Loss:0.04170 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.74s\n", - "2296864 Examples seen. Accuracy:0.9669 Error: 0.10571 Loss:0.08581 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.63s\n", - "2297504 Examples seen. Accuracy:0.9672 Error: 0.12960 Loss:0.17848 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.63s\n", - "2298144 Examples seen. Accuracy:0.9668 Error: 0.07218 Loss:0.04813 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.69s\n", - "2298784 Examples seen. 
Accuracy:0.9660 Error: 0.04048 Loss:0.03776 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.64s\n", - "2299424 Examples seen. Accuracy:0.9656 Error: 0.11407 Loss:0.09519 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.63s\n", - "2300064 Examples seen. Accuracy:0.9649 Error: 0.16443 Loss:0.19128 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.62s\n", - "2300704 Examples seen. Accuracy:0.9649 Error: 0.05915 Loss:0.05190 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.64s\n", - "2301344 Examples seen. Accuracy:0.9645 Error: 0.06060 Loss:0.06638 Threads: 8 Forward time: 5.09s Backward time: 3.33s Step time: 3.73s\n", - "2301984 Examples seen. Accuracy:0.9642 Error: 0.05887 Loss:0.03872 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.71s\n", - "2302624 Examples seen. Accuracy:0.9641 Error: 0.07616 Loss:0.05571 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.64s\n", - "2303264 Examples seen. Accuracy:0.9634 Error: 0.16277 Loss:0.15420 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.63s\n", - "2303904 Examples seen. Accuracy:0.9622 Error: 0.08038 Loss:0.05928 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.63s\n", - "2304544 Examples seen. Accuracy:0.9625 Error: 0.14119 Loss:0.12603 Threads: 8 Forward time: 4.91s Backward time: 3.24s Step time: 3.69s\n", - "2305184 Examples seen. Accuracy:0.9634 Error: 0.06931 Loss:0.05200 Threads: 8 Forward time: 5.26s Backward time: 3.37s Step time: 3.71s\n", - "2305824 Examples seen. Accuracy:0.9637 Error: 0.09103 Loss:0.09831 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.69s\n", - "2306464 Examples seen. Accuracy:0.9635 Error: 0.10158 Loss:0.08078 Threads: 8 Forward time: 5.06s Backward time: 3.28s Step time: 3.71s\n", - "2307104 Examples seen. 
Accuracy:0.9633 Error: 0.05507 Loss:0.07271 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.69s\n", - "2307744 Examples seen. Accuracy:0.9630 Error: 0.07128 Loss:0.08737 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.68s\n", - "2308384 Examples seen. Accuracy:0.9629 Error: 0.08583 Loss:0.06452 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.63s\n", - "2309024 Examples seen. Accuracy:0.9635 Error: 0.09245 Loss:0.07497 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.77s\n", - "2309664 Examples seen. Accuracy:0.9642 Error: 0.12577 Loss:0.10954 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.66s\n", - "2310304 Examples seen. Accuracy:0.9632 Error: 0.07365 Loss:0.05179 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.65s\n", - "2310944 Examples seen. Accuracy:0.9634 Error: 0.05146 Loss:0.04511 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.66s\n", - "2311584 Examples seen. Accuracy:0.9630 Error: 0.07177 Loss:0.06599 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.65s\n", - "2312224 Examples seen. Accuracy:0.9639 Error: 0.05840 Loss:0.06960 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.65s\n", - "2312864 Examples seen. Accuracy:0.9628 Error: 0.10309 Loss:0.09770 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.65s\n", - "2313504 Examples seen. Accuracy:0.9627 Error: 0.08128 Loss:0.08986 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.72s\n", - "2314144 Examples seen. Accuracy:0.9632 Error: 0.08707 Loss:0.06622 Threads: 8 Forward time: 4.97s Backward time: 3.27s Step time: 3.66s\n", - "2314784 Examples seen. Accuracy:0.9636 Error: 0.08530 Loss:0.08689 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.67s\n", - "2315424 Examples seen. 
Accuracy:0.9631 Error: 0.11385 Loss:0.23239 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.67s\n", - "2316064 Examples seen. Accuracy:0.9635 Error: 0.16649 Loss:0.17085 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.66s\n", - "2316704 Examples seen. Accuracy:0.9630 Error: 0.11639 Loss:0.09509 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.67s\n", - "2317344 Examples seen. Accuracy:0.9630 Error: 0.07538 Loss:0.04672 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.67s\n", - "2317984 Examples seen. Accuracy:0.9625 Error: 0.07616 Loss:0.10934 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.67s\n", - "2318624 Examples seen. Accuracy:0.9625 Error: 0.14170 Loss:0.16748 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.67s\n", - "2319264 Examples seen. Accuracy:0.9618 Error: 0.07078 Loss:0.06174 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.67s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2319904 Examples seen. Accuracy:0.9612 Error: 0.15473 Loss:0.14835 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.65s\n", - "2320544 Examples seen. Accuracy:0.9611 Error: 0.13248 Loss:0.14146 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.65s\n", - "2321184 Examples seen. Accuracy:0.9617 Error: 0.10677 Loss:0.07230 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.66s\n", - "2321824 Examples seen. Accuracy:0.9627 Error: 0.01910 Loss:0.01003 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.72s\n", - "2322464 Examples seen. Accuracy:0.9629 Error: 0.16464 Loss:0.35405 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.71s\n", - "2323104 Examples seen. Accuracy:0.9640 Error: 0.06082 Loss:0.06551 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.73s\n", - "2323744 Examples seen. 
Accuracy:0.9627 Error: 0.17456 Loss:0.26165 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.71s\n", - "2324384 Examples seen. Accuracy:0.9626 Error: 0.09555 Loss:0.11007 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.77s\n", - "2325024 Examples seen. Accuracy:0.9632 Error: 0.10559 Loss:0.11993 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.71s\n", - "2325664 Examples seen. Accuracy:0.9646 Error: 0.04329 Loss:0.02862 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.71s\n", - "2326304 Examples seen. Accuracy:0.9655 Error: 0.08323 Loss:0.06619 Threads: 8 Forward time: 5.09s Backward time: 3.29s Step time: 3.79s\n", - "2326944 Examples seen. Accuracy:0.9649 Error: 0.21153 Loss:0.34516 Threads: 8 Forward time: 5.19s Backward time: 3.35s Step time: 3.80s\n", - "2327584 Examples seen. Accuracy:0.9649 Error: 0.13711 Loss:0.13058 Threads: 8 Forward time: 5.11s Backward time: 3.31s Step time: 3.80s\n", - "2328224 Examples seen. Accuracy:0.9651 Error: 0.07983 Loss:0.07648 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.69s\n", - "2328864 Examples seen. Accuracy:0.9650 Error: 0.10170 Loss:0.09765 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.68s\n", - "2329504 Examples seen. Accuracy:0.9645 Error: 0.04890 Loss:0.03065 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.66s\n", - "2330144 Examples seen. Accuracy:0.9634 Error: 0.16142 Loss:0.13705 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.67s\n", - "2330784 Examples seen. Accuracy:0.9623 Error: 0.09151 Loss:0.06941 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.64s\n", - "2331424 Examples seen. Accuracy:0.9630 Error: 0.01054 Loss:0.00552 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.67s\n", - "2332064 Examples seen. 
Accuracy:0.9615 Error: 0.12485 Loss:0.12610 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.74s\n", - "2332704 Examples seen. Accuracy:0.9619 Error: 0.08962 Loss:0.11357 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.67s\n", - "2333344 Examples seen. Accuracy:0.9622 Error: 0.09270 Loss:0.07307 Threads: 8 Forward time: 5.11s Backward time: 3.25s Step time: 3.75s\n", - "2333984 Examples seen. Accuracy:0.9628 Error: 0.06777 Loss:0.04050 Threads: 8 Forward time: 5.05s Backward time: 3.27s Step time: 3.72s\n", - "2334624 Examples seen. Accuracy:0.9629 Error: 0.09292 Loss:0.09402 Threads: 8 Forward time: 5.06s Backward time: 3.22s Step time: 3.73s\n", - "2335264 Examples seen. Accuracy:0.9635 Error: 0.04261 Loss:0.02376 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.72s\n", - "2335904 Examples seen. Accuracy:0.9632 Error: 0.06623 Loss:0.04791 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.68s\n", - "2336544 Examples seen. Accuracy:0.9627 Error: 0.09927 Loss:0.08880 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.70s\n", - "2337184 Examples seen. Accuracy:0.9640 Error: 0.06512 Loss:0.07985 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.79s\n", - "2337824 Examples seen. Accuracy:0.9639 Error: 0.04249 Loss:0.02918 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.64s\n", - "2338464 Examples seen. Accuracy:0.9634 Error: 0.08471 Loss:0.08572 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.68s\n", - "2339104 Examples seen. Accuracy:0.9646 Error: 0.03504 Loss:0.01831 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.67s\n", - "2339744 Examples seen. Accuracy:0.9649 Error: 0.02018 Loss:0.01086 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.66s\n", - "2340384 Examples seen. 
Accuracy:0.9650 Error: 0.06721 Loss:0.07485 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.70s\n", - "2341024 Examples seen. Accuracy:0.9648 Error: 0.13182 Loss:0.14514 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.68s\n", - "2341664 Examples seen. Accuracy:0.9645 Error: 0.11368 Loss:0.10051 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.67s\n", - "2342304 Examples seen. Accuracy:0.9650 Error: 0.05954 Loss:0.04595 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.71s\n", - "2342944 Examples seen. Accuracy:0.9645 Error: 0.05092 Loss:0.03668 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.61s\n", - "2343584 Examples seen. Accuracy:0.9647 Error: 0.06973 Loss:0.07446 Threads: 8 Forward time: 5.07s Backward time: 3.28s Step time: 3.66s\n", - "2344224 Examples seen. Accuracy:0.9625 Error: 0.20134 Loss:0.21778 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.69s\n", - "2344864 Examples seen. Accuracy:0.9629 Error: 0.12162 Loss:0.20828 Threads: 8 Forward time: 4.97s Backward time: 3.17s Step time: 3.62s\n", - "Starting Validation.\n", - "Epochs: 47 Examples seen:2345488 Validation Accuracy: 0.9844 Validation Error: 0.0403 Validation Loss: 0.0403 Total time: 253.64min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 47. Working time: 4.23 hours.\n", - "2346128 Examples seen. Accuracy:0.9621 Error: 0.12571 Loss:0.10451 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.71s\n", - "2346768 Examples seen. Accuracy:0.9625 Error: 0.18865 Loss:0.27467 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.60s\n", - "2347408 Examples seen. Accuracy:0.9632 Error: 0.06129 Loss:0.04071 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.61s\n", - "2348048 Examples seen. Accuracy:0.9624 Error: 0.09933 Loss:0.08646 Threads: 8 Forward time: 5.17s Backward time: 3.37s Step time: 3.75s\n", - "2348688 Examples seen. 
Accuracy:0.9636 Error: 0.01899 Loss:0.01031 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.64s\n", - "2349328 Examples seen. Accuracy:0.9639 Error: 0.04952 Loss:0.03411 Threads: 8 Forward time: 5.04s Backward time: 3.28s Step time: 3.69s\n", - "2349968 Examples seen. Accuracy:0.9651 Error: 0.05086 Loss:0.03274 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.62s\n", - "2350608 Examples seen. Accuracy:0.9641 Error: 0.14228 Loss:0.13666 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "2351248 Examples seen. Accuracy:0.9642 Error: 0.06595 Loss:0.05662 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.64s\n", - "2351888 Examples seen. Accuracy:0.9641 Error: 0.08222 Loss:0.06902 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.63s\n", - "2352528 Examples seen. Accuracy:0.9639 Error: 0.11847 Loss:0.10436 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.62s\n", - "2353168 Examples seen. Accuracy:0.9637 Error: 0.09705 Loss:0.08791 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.63s\n", - "2353808 Examples seen. Accuracy:0.9642 Error: 0.09139 Loss:0.10848 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.61s\n", - "2354448 Examples seen. Accuracy:0.9622 Error: 0.09577 Loss:0.07764 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.61s\n", - "2355088 Examples seen. Accuracy:0.9628 Error: 0.08163 Loss:0.07548 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.59s\n", - "2355728 Examples seen. Accuracy:0.9626 Error: 0.07974 Loss:0.07101 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.63s\n", - "2356368 Examples seen. Accuracy:0.9628 Error: 0.06909 Loss:0.04493 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.60s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2357008 Examples seen. 
Accuracy:0.9620 Error: 0.15517 Loss:0.16409 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.62s\n", - "2357648 Examples seen. Accuracy:0.9628 Error: 0.13398 Loss:0.10719 Threads: 8 Forward time: 5.06s Backward time: 3.26s Step time: 3.64s\n", - "2358288 Examples seen. Accuracy:0.9634 Error: 0.05161 Loss:0.03175 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.67s\n", - "2358928 Examples seen. Accuracy:0.9633 Error: 0.08879 Loss:0.05667 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.63s\n", - "2359568 Examples seen. Accuracy:0.9635 Error: 0.03576 Loss:0.02231 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.63s\n", - "2360208 Examples seen. Accuracy:0.9635 Error: 0.03643 Loss:0.02726 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.64s\n", - "2360848 Examples seen. Accuracy:0.9635 Error: 0.05455 Loss:0.04282 Threads: 8 Forward time: 4.92s Backward time: 3.19s Step time: 3.61s\n", - "2361488 Examples seen. Accuracy:0.9636 Error: 0.03514 Loss:0.03008 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.62s\n", - "2362128 Examples seen. Accuracy:0.9629 Error: 0.08633 Loss:0.09258 Threads: 8 Forward time: 5.04s Backward time: 3.29s Step time: 3.64s\n", - "2362768 Examples seen. Accuracy:0.9633 Error: 0.08147 Loss:0.05970 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.63s\n", - "2363408 Examples seen. Accuracy:0.9634 Error: 0.06049 Loss:0.03963 Threads: 8 Forward time: 5.08s Backward time: 3.28s Step time: 3.69s\n", - "2364048 Examples seen. Accuracy:0.9637 Error: 0.09302 Loss:0.07505 Threads: 8 Forward time: 5.09s Backward time: 3.28s Step time: 3.73s\n", - "2364688 Examples seen. Accuracy:0.9624 Error: 0.05407 Loss:0.04131 Threads: 8 Forward time: 5.07s Backward time: 3.26s Step time: 3.65s\n", - "2365328 Examples seen. 
Accuracy:0.9627 Error: 0.12092 Loss:0.14723 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.72s\n", - "2365968 Examples seen. Accuracy:0.9619 Error: 0.12603 Loss:0.10947 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.63s\n", - "2366608 Examples seen. Accuracy:0.9625 Error: 0.08230 Loss:0.07550 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.64s\n", - "2367248 Examples seen. Accuracy:0.9619 Error: 0.04252 Loss:0.03187 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.68s\n", - "2367888 Examples seen. Accuracy:0.9626 Error: 0.05655 Loss:0.07812 Threads: 8 Forward time: 5.10s Backward time: 3.27s Step time: 3.66s\n", - "2368528 Examples seen. Accuracy:0.9633 Error: 0.10837 Loss:0.13137 Threads: 8 Forward time: 5.06s Backward time: 3.25s Step time: 3.65s\n", - "2369168 Examples seen. Accuracy:0.9632 Error: 0.09525 Loss:0.08900 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.64s\n", - "2369808 Examples seen. Accuracy:0.9637 Error: 0.20888 Loss:0.23777 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.66s\n", - "2370448 Examples seen. Accuracy:0.9644 Error: 0.06795 Loss:0.09751 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.66s\n", - "2371088 Examples seen. Accuracy:0.9642 Error: 0.09239 Loss:0.16268 Threads: 8 Forward time: 5.04s Backward time: 3.27s Step time: 3.88s\n", - "2371728 Examples seen. Accuracy:0.9642 Error: 0.11508 Loss:0.10845 Threads: 8 Forward time: 5.05s Backward time: 3.26s Step time: 3.65s\n", - "2372368 Examples seen. Accuracy:0.9634 Error: 0.04192 Loss:0.02852 Threads: 8 Forward time: 4.91s Backward time: 3.21s Step time: 3.63s\n", - "2373008 Examples seen. Accuracy:0.9632 Error: 0.09111 Loss:0.10761 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.61s\n", - "2373648 Examples seen. 
Accuracy:0.9629 Error: 0.13887 Loss:0.16349 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.61s\n", - "2374288 Examples seen. Accuracy:0.9631 Error: 0.07565 Loss:0.06116 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.62s\n", - "2374928 Examples seen. Accuracy:0.9625 Error: 0.13207 Loss:0.20477 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.62s\n", - "2375568 Examples seen. Accuracy:0.9623 Error: 0.10514 Loss:0.08668 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.66s\n", - "2376208 Examples seen. Accuracy:0.9618 Error: 0.05733 Loss:0.05255 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.62s\n", - "2376848 Examples seen. Accuracy:0.9625 Error: 0.06629 Loss:0.05432 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.62s\n", - "2377488 Examples seen. Accuracy:0.9628 Error: 0.12186 Loss:0.11690 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.61s\n", - "2378128 Examples seen. Accuracy:0.9615 Error: 0.08455 Loss:0.05888 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.66s\n", - "2378768 Examples seen. Accuracy:0.9629 Error: 0.08426 Loss:0.07770 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.61s\n", - "2379408 Examples seen. Accuracy:0.9624 Error: 0.07930 Loss:0.09174 Threads: 8 Forward time: 5.07s Backward time: 3.26s Step time: 3.62s\n", - "2380048 Examples seen. Accuracy:0.9626 Error: 0.09010 Loss:0.05617 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.62s\n", - "2380688 Examples seen. Accuracy:0.9620 Error: 0.07518 Loss:0.15200 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.62s\n", - "2381328 Examples seen. Accuracy:0.9627 Error: 0.08042 Loss:0.15040 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.62s\n", - "2381968 Examples seen. 
Accuracy:0.9623 Error: 0.08539 Loss:0.06390 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "2382608 Examples seen. Accuracy:0.9620 Error: 0.04215 Loss:0.02372 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.68s\n", - "2383248 Examples seen. Accuracy:0.9634 Error: 0.02382 Loss:0.01421 Threads: 8 Forward time: 5.08s Backward time: 3.25s Step time: 3.72s\n", - "2383888 Examples seen. Accuracy:0.9637 Error: 0.19136 Loss:0.17446 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.66s\n", - "2384528 Examples seen. Accuracy:0.9632 Error: 0.09805 Loss:0.09111 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.65s\n", - "2385168 Examples seen. Accuracy:0.9624 Error: 0.09247 Loss:0.07906 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.65s\n", - "2385808 Examples seen. Accuracy:0.9628 Error: 0.07368 Loss:0.04764 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.67s\n", - "2386448 Examples seen. Accuracy:0.9631 Error: 0.07260 Loss:0.08953 Threads: 8 Forward time: 4.94s Backward time: 3.20s Step time: 3.65s\n", - "2387088 Examples seen. Accuracy:0.9636 Error: 0.06226 Loss:0.04941 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.69s\n", - "2387728 Examples seen. Accuracy:0.9629 Error: 0.04731 Loss:0.03388 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.67s\n", - "2388368 Examples seen. Accuracy:0.9633 Error: 0.04096 Loss:0.02435 Threads: 8 Forward time: 5.05s Backward time: 3.27s Step time: 3.68s\n", - "2389008 Examples seen. Accuracy:0.9635 Error: 0.05838 Loss:0.06045 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.65s\n", - "2389648 Examples seen. Accuracy:0.9637 Error: 0.07119 Loss:0.04667 Threads: 8 Forward time: 4.94s Backward time: 3.19s Step time: 3.73s\n", - "2390288 Examples seen. 
Accuracy:0.9639 Error: 0.17132 Loss:0.22397 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.71s\n", - "2390928 Examples seen. Accuracy:0.9630 Error: 0.10484 Loss:0.07961 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.71s\n", - "2391568 Examples seen. Accuracy:0.9621 Error: 0.07735 Loss:0.05401 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.68s\n", - "2392208 Examples seen. Accuracy:0.9621 Error: 0.07971 Loss:0.07634 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.64s\n", - "2392848 Examples seen. Accuracy:0.9616 Error: 0.10771 Loss:0.11172 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.67s\n", - "2393488 Examples seen. Accuracy:0.9605 Error: 0.04745 Loss:0.03086 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.66s\n", - "2394128 Examples seen. Accuracy:0.9601 Error: 0.04269 Loss:0.02374 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2394768 Examples seen. Accuracy:0.9598 Error: 0.10447 Loss:0.08630 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.63s\n", - "Starting Validation.\n", - "VALIDATION RECORD! 
Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 48 Examples seen:2395392 Validation Accuracy: 0.9859 Validation Error: 0.0398 Validation Loss: 0.0396 Total time: 258.86min\n", - "Layer 0 Max Output: 1.266 Min Output: -2.000 TNNetInput 128,128,3 Times: 0.00s 0.00s\n", - "Layer 1 Neurons: 64 Max Weight: 0.459 Min Weight: -0.377 Max Output: 6.108 Min Output: -5.924 TNNetConvolutionLinear 66,66,64 Times: 8.55s 0.40s Parent:0\n", - "Layer 2 Max Output: 6.108 Min Output: -3.525 TNNetMaxPool 33,33,64 Times: 3.62s 0.06s Parent:1\n", - "Layer 3 Neurons: 1 Max Weight: 0.650 Min Weight: 0.228 Max Output: 9.040 Min Output: -5.770 TNNetMovingStdNormalization 33,33,64 Times: 0.28s 0.00s Parent:2\n", - "Layer 4 Neurons: 64 Max Weight: 0.439 Min Weight: -0.230 Max Output: 11.407 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.87s 0.51s Parent:3\n", - "Layer 5 Neurons: 64 Max Weight: 0.397 Min Weight: -0.367 Max Output: 11.943 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.89s 0.18s Parent:4\n", - "Layer 6 Max Output: 11.943 Min Output: 0.000 TNNetMaxPool 17,17,64 Times: 0.48s 0.02s Parent:5\n", - "Layer 7 Neurons: 64 Max Weight: 0.431 Min Weight: -0.275 Max Output: 8.258 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.44s 0.09s Parent:6\n", - "Layer 8 Neurons: 64 Max Weight: 0.290 Min Weight: -0.235 Max Output: 6.972 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.48s 0.09s Parent:7\n", - "Layer 9 Neurons: 64 Max Weight: 0.253 Min Weight: -0.221 Max Output: 13.311 Min Output: 0.000 TNNetConvolutionReLU 9,9,64 Times: 0.46s 0.02s Parent:8\n", - "Layer 10 Max Output: 13.311 Min Output: 0.000 TNNetDropout 9,9,64 Times: 0.01s 0.00s Parent:9\n", - "Layer 11 Max Output: 13.311 Min Output: 0.000 TNNetMaxPool 5,5,64 Times: 0.05s 0.00s Parent:10\n", - "Layer 12 Neurons: 39 Max Weight: 0.382 Min Weight: -0.396 Max Output: 41.235 Min Output: -17.884 TNNetFullConnectLinear 39,1,1 Times: 0.03s 0.00s Parent:11\n", - "Layer 13 Max Output: 1.000 Min 
Output: 0.000 TNNetSoftMax 39,1,1 Times: 0.00s 0.00s Parent:12\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 48. Working time: 4.31 hours.\n", - "2396032 Examples seen. Accuracy:0.9600 Error: 0.08347 Loss:0.06521 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.76s\n", - "2396672 Examples seen. Accuracy:0.9615 Error: 0.04975 Loss:0.04257 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.66s\n", - "2397312 Examples seen. Accuracy:0.9630 Error: 0.15518 Loss:0.22900 Threads: 8 Forward time: 4.96s Backward time: 3.18s Step time: 3.64s\n", - "2397952 Examples seen. Accuracy:0.9621 Error: 0.10374 Loss:0.16176 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.63s\n", - "2398592 Examples seen. Accuracy:0.9608 Error: 0.14474 Loss:0.16522 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.69s\n", - "2399232 Examples seen. Accuracy:0.9611 Error: 0.09554 Loss:0.09972 Threads: 8 Forward time: 5.06s Backward time: 3.25s Step time: 3.67s\n", - "2399872 Examples seen. Accuracy:0.9615 Error: 0.09499 Loss:0.08449 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.66s\n", - "2400512 Examples seen. Accuracy:0.9621 Error: 0.09522 Loss:0.09784 Threads: 8 Forward time: 5.08s Backward time: 3.27s Step time: 3.73s\n", - "2401152 Examples seen. Accuracy:0.9631 Error: 0.03988 Loss:0.02454 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.70s\n", - "2401792 Examples seen. Accuracy:0.9635 Error: 0.04436 Loss:0.03415 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.73s\n", - "2402432 Examples seen. Accuracy:0.9634 Error: 0.08239 Loss:0.05189 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.69s\n", - "2403072 Examples seen. Accuracy:0.9630 Error: 0.10845 Loss:0.08257 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.69s\n", - "2403712 Examples seen. 
Accuracy:0.9625 Error: 0.08418 Loss:0.09886 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.72s\n", - "2404352 Examples seen. Accuracy:0.9624 Error: 0.03258 Loss:0.01814 Threads: 8 Forward time: 5.08s Backward time: 3.25s Step time: 3.74s\n", - "2404992 Examples seen. Accuracy:0.9637 Error: 0.02063 Loss:0.01094 Threads: 8 Forward time: 5.02s Backward time: 3.31s Step time: 3.76s\n", - "2405632 Examples seen. Accuracy:0.9641 Error: 0.01119 Loss:0.00600 Threads: 8 Forward time: 5.06s Backward time: 3.25s Step time: 3.71s\n", - "2406272 Examples seen. Accuracy:0.9641 Error: 0.06662 Loss:0.06698 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.73s\n", - "2406912 Examples seen. Accuracy:0.9633 Error: 0.08579 Loss:0.08309 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "2407552 Examples seen. Accuracy:0.9629 Error: 0.03959 Loss:0.02273 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.65s\n", - "2408192 Examples seen. Accuracy:0.9629 Error: 0.07206 Loss:0.07031 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "2408832 Examples seen. Accuracy:0.9642 Error: 0.03145 Loss:0.01727 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.64s\n", - "2409472 Examples seen. Accuracy:0.9646 Error: 0.15736 Loss:0.20048 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.63s\n", - "2410112 Examples seen. Accuracy:0.9643 Error: 0.11770 Loss:0.09110 Threads: 8 Forward time: 4.92s Backward time: 3.20s Step time: 3.64s\n", - "2410752 Examples seen. Accuracy:0.9635 Error: 0.04128 Loss:0.04368 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.63s\n", - "2411392 Examples seen. Accuracy:0.9625 Error: 0.15233 Loss:0.20459 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.71s\n", - "2412032 Examples seen. 
Accuracy:0.9632 Error: 0.12874 Loss:0.18221 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.64s\n", - "2412672 Examples seen. Accuracy:0.9635 Error: 0.03913 Loss:0.02232 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.64s\n", - "2413312 Examples seen. Accuracy:0.9621 Error: 0.07859 Loss:0.07963 Threads: 8 Forward time: 5.12s Backward time: 3.30s Step time: 3.83s\n", - "2413952 Examples seen. Accuracy:0.9620 Error: 0.07945 Loss:0.09900 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.71s\n", - "2414592 Examples seen. Accuracy:0.9627 Error: 0.06737 Loss:0.07263 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.68s\n", - "2415232 Examples seen. Accuracy:0.9635 Error: 0.13535 Loss:0.12909 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.65s\n", - "2415872 Examples seen. Accuracy:0.9645 Error: 0.06331 Loss:0.05422 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.66s\n", - "2416512 Examples seen. Accuracy:0.9638 Error: 0.11218 Loss:0.10808 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.66s\n", - "2417152 Examples seen. Accuracy:0.9625 Error: 0.06961 Loss:0.04812 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.66s\n", - "2417792 Examples seen. Accuracy:0.9618 Error: 0.09375 Loss:0.10179 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.66s\n", - "2418432 Examples seen. Accuracy:0.9613 Error: 0.06926 Loss:0.11880 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.65s\n", - "2419072 Examples seen. Accuracy:0.9620 Error: 0.15010 Loss:0.15108 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.66s\n", - "2419712 Examples seen. Accuracy:0.9622 Error: 0.09841 Loss:0.16576 Threads: 8 Forward time: 4.99s Backward time: 3.19s Step time: 3.67s\n", - "2420352 Examples seen. 
Accuracy:0.9617 Error: 0.09181 Loss:0.08123 Threads: 8 Forward time: 5.06s Backward time: 3.23s Step time: 3.68s\n", - "2420992 Examples seen. Accuracy:0.9609 Error: 0.07757 Loss:0.06275 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.67s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2421632 Examples seen. Accuracy:0.9611 Error: 0.05869 Loss:0.04683 Threads: 8 Forward time: 4.88s Backward time: 3.19s Step time: 3.65s\n", - "2422272 Examples seen. Accuracy:0.9611 Error: 0.12574 Loss:0.13354 Threads: 8 Forward time: 5.06s Backward time: 3.21s Step time: 3.64s\n", - "2422912 Examples seen. Accuracy:0.9607 Error: 0.07887 Loss:0.05618 Threads: 8 Forward time: 5.10s Backward time: 3.24s Step time: 3.69s\n", - "2423552 Examples seen. Accuracy:0.9612 Error: 0.03758 Loss:0.03187 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.66s\n", - "2424192 Examples seen. Accuracy:0.9606 Error: 0.04157 Loss:0.04722 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.65s\n", - "2424832 Examples seen. Accuracy:0.9604 Error: 0.08743 Loss:0.05742 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.66s\n", - "2425472 Examples seen. Accuracy:0.9604 Error: 0.10177 Loss:0.18647 Threads: 8 Forward time: 4.93s Backward time: 3.21s Step time: 3.65s\n", - "2426112 Examples seen. Accuracy:0.9612 Error: 0.14764 Loss:0.13367 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.67s\n", - "2426752 Examples seen. Accuracy:0.9621 Error: 0.02184 Loss:0.01407 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.68s\n", - "2427392 Examples seen. Accuracy:0.9618 Error: 0.07716 Loss:0.07552 Threads: 8 Forward time: 5.09s Backward time: 3.30s Step time: 3.76s\n", - "2428032 Examples seen. Accuracy:0.9612 Error: 0.10830 Loss:0.10426 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.70s\n", - "2428672 Examples seen. 
Accuracy:0.9618 Error: 0.08810 Loss:0.06320 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.65s\n", - "2429312 Examples seen. Accuracy:0.9612 Error: 0.09988 Loss:0.11013 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.71s\n", - "2429952 Examples seen. Accuracy:0.9618 Error: 0.10986 Loss:0.19201 Threads: 8 Forward time: 5.05s Backward time: 3.28s Step time: 3.66s\n", - "2430592 Examples seen. Accuracy:0.9623 Error: 0.04762 Loss:0.02820 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.76s\n", - "2431232 Examples seen. Accuracy:0.9625 Error: 0.06206 Loss:0.03979 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.72s\n", - "2431872 Examples seen. Accuracy:0.9633 Error: 0.02842 Loss:0.01543 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.71s\n", - "2432512 Examples seen. Accuracy:0.9639 Error: 0.16059 Loss:0.18375 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.71s\n", - "2433152 Examples seen. Accuracy:0.9640 Error: 0.07255 Loss:0.08636 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.70s\n", - "2433792 Examples seen. Accuracy:0.9650 Error: 0.05588 Loss:0.05599 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.73s\n", - "2434432 Examples seen. Accuracy:0.9646 Error: 0.09000 Loss:0.07720 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.73s\n", - "2435072 Examples seen. Accuracy:0.9645 Error: 0.09038 Loss:0.09173 Threads: 8 Forward time: 5.10s Backward time: 3.31s Step time: 3.72s\n", - "2435712 Examples seen. Accuracy:0.9643 Error: 0.02760 Loss:0.01909 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.80s\n", - "2436352 Examples seen. Accuracy:0.9663 Error: 0.07000 Loss:0.04519 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.69s\n", - "2436992 Examples seen. 
Accuracy:0.9662 Error: 0.19523 Loss:0.20279 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.65s\n", - "2437632 Examples seen. Accuracy:0.9672 Error: 0.06155 Loss:0.06979 Threads: 8 Forward time: 5.08s Backward time: 3.28s Step time: 3.72s\n", - "2438272 Examples seen. Accuracy:0.9667 Error: 0.08146 Loss:0.06760 Threads: 8 Forward time: 5.07s Backward time: 3.27s Step time: 3.72s\n", - "2438912 Examples seen. Accuracy:0.9666 Error: 0.12747 Loss:0.17507 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.64s\n", - "2439552 Examples seen. Accuracy:0.9673 Error: 0.09810 Loss:0.07829 Threads: 8 Forward time: 4.90s Backward time: 3.24s Step time: 3.67s\n", - "2440192 Examples seen. Accuracy:0.9675 Error: 0.06045 Loss:0.03975 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.68s\n", - "2440832 Examples seen. Accuracy:0.9674 Error: 0.10832 Loss:0.08924 Threads: 8 Forward time: 5.10s Backward time: 3.33s Step time: 3.68s\n", - "2441472 Examples seen. Accuracy:0.9668 Error: 0.06639 Loss:0.05132 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.72s\n", - "2442112 Examples seen. Accuracy:0.9669 Error: 0.11082 Loss:0.10048 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.71s\n", - "2442752 Examples seen. Accuracy:0.9652 Error: 0.18231 Loss:0.19762 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.66s\n", - "2443392 Examples seen. Accuracy:0.9647 Error: 0.12482 Loss:0.08833 Threads: 8 Forward time: 5.07s Backward time: 3.22s Step time: 3.66s\n", - "2444032 Examples seen. Accuracy:0.9642 Error: 0.13215 Loss:0.09087 Threads: 8 Forward time: 5.06s Backward time: 3.22s Step time: 3.68s\n", - "2444672 Examples seen. 
Accuracy:0.9641 Error: 0.09566 Loss:0.07425 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.67s\n", - "Starting Validation.\n", - "Epochs: 49 Examples seen:2445296 Validation Accuracy: 0.9848 Validation Error: 0.0397 Validation Loss: 0.0398 Total time: 264.13min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 49. Working time: 4.4 hours.\n", - "2445936 Examples seen. Accuracy:0.9647 Error: 0.09779 Loss:0.10613 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.71s\n", - "2446576 Examples seen. Accuracy:0.9645 Error: 0.09939 Loss:0.10191 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.72s\n", - "2447216 Examples seen. Accuracy:0.9651 Error: 0.08636 Loss:0.06837 Threads: 8 Forward time: 5.03s Backward time: 3.20s Step time: 3.72s\n", - "2447856 Examples seen. Accuracy:0.9639 Error: 0.11687 Loss:0.11101 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.72s\n", - "2448496 Examples seen. Accuracy:0.9633 Error: 0.07291 Loss:0.05694 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.74s\n", - "2449136 Examples seen. Accuracy:0.9633 Error: 0.10625 Loss:0.12971 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.72s\n", - "2449776 Examples seen. Accuracy:0.9636 Error: 0.08616 Loss:0.07411 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.73s\n", - "2450416 Examples seen. Accuracy:0.9631 Error: 0.11457 Loss:0.08526 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.71s\n", - "2451056 Examples seen. Accuracy:0.9623 Error: 0.09590 Loss:0.10775 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.74s\n", - "2451696 Examples seen. Accuracy:0.9613 Error: 0.14777 Loss:0.12411 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.74s\n", - "2452336 Examples seen. Accuracy:0.9612 Error: 0.06299 Loss:0.06043 Threads: 8 Forward time: 4.89s Backward time: 3.22s Step time: 3.70s\n", - "2452976 Examples seen. 
Accuracy:0.9609 Error: 0.09018 Loss:0.09205 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.69s\n", - "2453616 Examples seen. Accuracy:0.9597 Error: 0.13754 Loss:0.21120 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.72s\n", - "2454256 Examples seen. Accuracy:0.9599 Error: 0.14907 Loss:0.12133 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.71s\n", - "2454896 Examples seen. Accuracy:0.9604 Error: 0.03802 Loss:0.02116 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.68s\n", - "2455536 Examples seen. Accuracy:0.9612 Error: 0.20154 Loss:0.22435 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.70s\n", - "2456176 Examples seen. Accuracy:0.9609 Error: 0.10072 Loss:0.12594 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.73s\n", - "2456816 Examples seen. Accuracy:0.9600 Error: 0.09720 Loss:0.08089 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.68s\n", - "2457456 Examples seen. Accuracy:0.9604 Error: 0.14106 Loss:0.11614 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.68s\n", - "2458096 Examples seen. Accuracy:0.9611 Error: 0.05850 Loss:0.04063 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.71s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2458736 Examples seen. Accuracy:0.9610 Error: 0.10419 Loss:0.10172 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.73s\n", - "2459376 Examples seen. Accuracy:0.9600 Error: 0.06087 Loss:0.05568 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.64s\n", - "2460016 Examples seen. Accuracy:0.9606 Error: 0.15841 Loss:0.14317 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.72s\n", - "2460656 Examples seen. Accuracy:0.9603 Error: 0.06821 Loss:0.06307 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.65s\n", - "2461296 Examples seen. 
Accuracy:0.9601 Error: 0.08654 Loss:0.09037 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.76s\n", - "2461936 Examples seen. Accuracy:0.9590 Error: 0.07632 Loss:0.06424 Threads: 8 Forward time: 5.06s Backward time: 3.24s Step time: 3.68s\n", - "2462576 Examples seen. Accuracy:0.9589 Error: 0.09366 Loss:0.15949 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.68s\n", - "2463216 Examples seen. Accuracy:0.9593 Error: 0.12086 Loss:0.12483 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.66s\n", - "2463856 Examples seen. Accuracy:0.9597 Error: 0.12919 Loss:0.20393 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.67s\n", - "2464496 Examples seen. Accuracy:0.9604 Error: 0.05858 Loss:0.03614 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.66s\n", - "2465136 Examples seen. Accuracy:0.9599 Error: 0.01954 Loss:0.01035 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.66s\n", - "2465776 Examples seen. Accuracy:0.9612 Error: 0.07069 Loss:0.05159 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.64s\n", - "2466416 Examples seen. Accuracy:0.9619 Error: 0.14101 Loss:0.18235 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.68s\n", - "2467056 Examples seen. Accuracy:0.9627 Error: 0.02598 Loss:0.01503 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.71s\n", - "2467696 Examples seen. Accuracy:0.9643 Error: 0.04241 Loss:0.04202 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.68s\n", - "2468336 Examples seen. Accuracy:0.9642 Error: 0.08662 Loss:0.05508 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.72s\n", - "2468976 Examples seen. Accuracy:0.9632 Error: 0.11560 Loss:0.09782 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.70s\n", - "2469616 Examples seen. 
Accuracy:0.9637 Error: 0.08343 Loss:0.07203 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.69s\n", - "2470256 Examples seen. Accuracy:0.9638 Error: 0.05606 Loss:0.03345 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.73s\n", - "2470896 Examples seen. Accuracy:0.9634 Error: 0.09244 Loss:0.12337 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.70s\n", - "2471536 Examples seen. Accuracy:0.9642 Error: 0.06850 Loss:0.05097 Threads: 8 Forward time: 5.01s Backward time: 3.17s Step time: 3.67s\n", - "2472176 Examples seen. Accuracy:0.9642 Error: 0.12828 Loss:0.28280 Threads: 8 Forward time: 5.11s Backward time: 3.27s Step time: 3.75s\n", - "2472816 Examples seen. Accuracy:0.9636 Error: 0.06643 Loss:0.06648 Threads: 8 Forward time: 5.05s Backward time: 3.28s Step time: 3.73s\n", - "2473456 Examples seen. Accuracy:0.9632 Error: 0.09598 Loss:0.07311 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.72s\n", - "2474096 Examples seen. Accuracy:0.9626 Error: 0.07572 Loss:0.06650 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.76s\n", - "2474736 Examples seen. Accuracy:0.9624 Error: 0.16787 Loss:0.20741 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.65s\n", - "2475376 Examples seen. Accuracy:0.9635 Error: 0.06696 Loss:0.06897 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.64s\n", - "2476016 Examples seen. Accuracy:0.9635 Error: 0.09461 Loss:0.07941 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.70s\n", - "2476656 Examples seen. Accuracy:0.9638 Error: 0.11986 Loss:0.11124 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.68s\n", - "2477296 Examples seen. Accuracy:0.9639 Error: 0.12191 Loss:0.15176 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.73s\n", - "2477936 Examples seen. 
Accuracy:0.9641 Error: 0.07233 Loss:0.05448 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.61s\n", - "2478576 Examples seen. Accuracy:0.9644 Error: 0.09154 Loss:0.11773 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.61s\n", - "2479216 Examples seen. Accuracy:0.9647 Error: 0.04387 Loss:0.02665 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.65s\n", - "2479856 Examples seen. Accuracy:0.9641 Error: 0.08265 Loss:0.05985 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.63s\n", - "2480496 Examples seen. Accuracy:0.9630 Error: 0.04707 Loss:0.02981 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.62s\n", - "2481136 Examples seen. Accuracy:0.9628 Error: 0.10169 Loss:0.07258 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.61s\n", - "2481776 Examples seen. Accuracy:0.9626 Error: 0.03753 Loss:0.02160 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.62s\n", - "2482416 Examples seen. Accuracy:0.9629 Error: 0.07974 Loss:0.06922 Threads: 8 Forward time: 4.97s Backward time: 3.27s Step time: 3.63s\n", - "2483056 Examples seen. Accuracy:0.9627 Error: 0.09616 Loss:0.07745 Threads: 8 Forward time: 5.07s Backward time: 3.29s Step time: 3.77s\n", - "2483696 Examples seen. Accuracy:0.9619 Error: 0.04000 Loss:0.02443 Threads: 8 Forward time: 5.04s Backward time: 3.28s Step time: 3.68s\n", - "2484336 Examples seen. Accuracy:0.9624 Error: 0.10765 Loss:0.08541 Threads: 8 Forward time: 5.26s Backward time: 3.35s Step time: 3.79s\n", - "2484976 Examples seen. Accuracy:0.9607 Error: 0.15562 Loss:0.11084 Threads: 8 Forward time: 5.14s Backward time: 3.28s Step time: 3.73s\n", - "2485616 Examples seen. Accuracy:0.9610 Error: 0.07639 Loss:0.05440 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.74s\n", - "2486256 Examples seen. 
Accuracy:0.9622 Error: 0.09286 Loss:0.06327 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.64s\n", - "2486896 Examples seen. Accuracy:0.9621 Error: 0.05640 Loss:0.03882 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.65s\n", - "2487536 Examples seen. Accuracy:0.9634 Error: 0.03119 Loss:0.01744 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.64s\n", - "2488176 Examples seen. Accuracy:0.9627 Error: 0.08898 Loss:0.07472 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.61s\n", - "2488816 Examples seen. Accuracy:0.9642 Error: 0.02770 Loss:0.01556 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.62s\n", - "2489456 Examples seen. Accuracy:0.9641 Error: 0.09492 Loss:0.06707 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.63s\n", - "2490096 Examples seen. Accuracy:0.9653 Error: 0.03905 Loss:0.02173 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.64s\n", - "2490736 Examples seen. Accuracy:0.9659 Error: 0.05739 Loss:0.03664 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.69s\n", - "2491376 Examples seen. Accuracy:0.9668 Error: 0.04905 Loss:0.03240 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.63s\n", - "2492016 Examples seen. Accuracy:0.9659 Error: 0.04817 Loss:0.02833 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.62s\n", - "2492656 Examples seen. Accuracy:0.9655 Error: 0.13183 Loss:0.18951 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.63s\n", - "2493296 Examples seen. Accuracy:0.9649 Error: 0.15715 Loss:0.15009 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.61s\n", - "2493936 Examples seen. Accuracy:0.9644 Error: 0.16351 Loss:0.17889 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.65s\n", - "2494576 Examples seen. 
Accuracy:0.9644 Error: 0.09626 Loss:0.06891 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.64s\n", - "Starting Validation.\n", - "Epochs: 50 Examples seen:2495200 Validation Accuracy: 0.9855 Validation Error: 0.0393 Validation Loss: 0.0392 Total time: 269.39min\n", - "Starting Testing.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epochs: 50 Examples seen:2495200 Test Accuracy: 0.9895 Test Error: 0.0335 Test Loss: 0.0307 Total time: 269.87min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 50. Working time: 4.5 hours.\n", - "Finished.\n" - ] - } - ], - "source": [ - "if os.path.isdir('plant'):\n", - " print(\"RUNNING: SimplePlantLeafDisease\")\n", - " !neural-api/bin/x86_64-linux/bin/SimplePlantLeafDisease" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.9" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.pas b/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.pas index 41523b4c..ec626a37 100644 --- a/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.pas +++ b/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.pas @@ -43,7 +43,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(39), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); // change ProportionToLoad to a smaller number if you don't have 
available 16GB of RAM. diff --git a/examples/SimplePlantLeafDisease/SimplePlantLeafDiseaseLoading.pas b/examples/SimplePlantLeafDisease/SimplePlantLeafDiseaseLoading.pas index 27b74b3d..b80b3e0b 100644 --- a/examples/SimplePlantLeafDisease/SimplePlantLeafDiseaseLoading.pas +++ b/examples/SimplePlantLeafDisease/SimplePlantLeafDiseaseLoading.pas @@ -61,7 +61,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}2), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(FTrainingFileNames.ClassCount), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); WriteLn diff --git a/examples/SimplePlantLeafDisease/SimplePlantLeafDiseaseLoadingAPI.pas b/examples/SimplePlantLeafDisease/SimplePlantLeafDiseaseLoadingAPI.pas index 21b3bad3..a44202f0 100644 --- a/examples/SimplePlantLeafDisease/SimplePlantLeafDiseaseLoadingAPI.pas +++ b/examples/SimplePlantLeafDisease/SimplePlantLeafDiseaseLoadingAPI.pas @@ -67,7 +67,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetConvolutionLinear.Create({Features=}512, {FeatureSize=}3, {Padding=}1, {Stride=}2), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(FTrainingFileNames.ClassCount), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); diff --git a/examples/SimplePlantLeafDisease/SimplePlantLeafDiseaseParallel.pas b/examples/SimplePlantLeafDisease/SimplePlantLeafDiseaseParallel.pas index fb72f699..8eac8b72 100644 --- a/examples/SimplePlantLeafDisease/SimplePlantLeafDiseaseParallel.pas +++ b/examples/SimplePlantLeafDisease/SimplePlantLeafDiseaseParallel.pas @@ -121,7 +121,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(39), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); // change ProportionToLoad to a smaller number if you don't have 
available 32GB of RAM. diff --git a/examples/SimpleTinyImageNet/SimpleTinyImageNet.pas b/examples/SimpleTinyImageNet/SimpleTinyImageNet.pas index e9029bc7..e6e308e2 100644 --- a/examples/SimpleTinyImageNet/SimpleTinyImageNet.pas +++ b/examples/SimpleTinyImageNet/SimpleTinyImageNet.pas @@ -41,7 +41,7 @@ TTestCNNAlgo = class(TCustomApplication) TNNetDropout.Create(0.5), TNNetMaxPool.Create(2), TNNetFullConnectLinear.Create(200), - TNNetSoftMax.Create() + TNNetSoftMax.Create({SkipBackpropDerivative=}1) ]); NN.DebugStructure(); // change ProportionToLoad to a smaller number if you don't have available 6GB of RAM. diff --git a/examples/SuperResolution/README.md b/examples/SuperResolution/README.md index e9aa6340..e2be0637 100644 --- a/examples/SuperResolution/README.md +++ b/examples/SuperResolution/README.md @@ -1,8 +1,8 @@ -# Simple Super Resolution Example +# Super Resolution Command Line Tool ## Introduction The image at the right side shows an example. The smaller image is the original image while the bigger image is the image processed twice by a neural network trained to increase image resolution. -This example has been created via the **SuperResolution.lpi** command line tool with: +This example has been created via the **SuperResolution.lpi** command line tool. 
The parameter `-i` defines the input file while `-o` defines the output file: ``` #SuperResolution -i street.png -o street2.png diff --git a/examples/VisualAutoencoder/uvisualautoencodertinyimagenet.lfm b/examples/VisualAutoencoder/uvisualautoencodertinyimagenet.lfm index 9bad9f7c..7bd19b97 100644 --- a/examples/VisualAutoencoder/uvisualautoencodertinyimagenet.lfm +++ b/examples/VisualAutoencoder/uvisualautoencodertinyimagenet.lfm @@ -11,7 +11,7 @@ object FormVisualLearning: TFormVisualLearning OnCreate = FormCreate OnDestroy = FormDestroy Position = poScreenCenter - LCLVersion = '2.0.2.0' + LCLVersion = '2.0.12.0' object ButLearn: TButton Left = 768 Height = 45 diff --git a/neural/neuraldatasets.pas b/neural/neuraldatasets.pas index 45b04ccf..b015a4f0 100644 --- a/neural/neuraldatasets.pas +++ b/neural/neuraldatasets.pas @@ -277,6 +277,38 @@ procedure TestBatch // This function translates the original CIFAR10 labels to Animal/Machine labels. procedure TranslateCifar10VolumesToMachineAnimal(VolumeList: TNNetVolumeList); +{ + RandomSubstring: + This NLP function takes a string as input and returns a substring that starts + immediately after a randomly selected space character within the input string. + If there are no spaces in the input string, the entire string is returned as is. + The function is useful for obtaining a random piece of text from a given string, + which can be applied in various scenarios that require text randomization. + + Space positions are tracked using a TIntegerList. The Copy function is used + to extract the substring from the randomly selected space position to the end + of the input string. +} +function RandomSubstring(const InputString: string): string; + +{ + RemoveRandomChars: + This function takes a string and an integer count as input. It removes Count + number of characters at random positions from the given string Str. 
The length + of the string is recalculated in each iteration to account for the reduction in + the string's length after each character removal. +} +function RemoveRandomChars(const Str: string; Count: integer): string; + + +// This function randomly removes one word from the input string. +function RemoveRandomWord(const Str: string): string; + +type TNNetAAInteger = array of array of integer; + +procedure LoadIntegersInCSV(filename: string; + var aTokens: TNNetAAInteger; MaxRows: integer = 0); + {$IFNDEF FPC} function SwapEndian(I:integer):integer; procedure FindAllDirectories(AList: TStrings; const SearchPath: String; @@ -568,7 +600,7 @@ procedure TClassesAndElements.LoadImages(color_encoding: integer; NewSizeX: inte {$IFDEF Debug} Self.LoadImages_NTL(0,1); {$ELSE} - NTL.StartProc(@Self.LoadImages_NTL); + NTL.StartProc({$IFDEF FPC}@{$ENDIF}Self.LoadImages_NTL); {$ENDIF} end; NTL.Free; @@ -1704,4 +1736,125 @@ procedure TestBatch pOutput.Free; end; +function RemoveRandomWord(const Str: string): string; +var + WordList: TNNetStringList; + RandomWordIndex: integer; +begin + Result := Str; + // Split the string into words based on spaces. + WordList := CreateTokenizedStringList(Result,' '); + // Check if there are any words to remove. + if WordList.Count > 1 then + begin + // Select a random word to remove. + RandomWordIndex := Random(WordList.Count); + WordList.Delete(RandomWordIndex); + // Reconstruct the string from the remaining words. + Result := WordList.DelimitedText; + end; + // Free the TStringList to prevent memory leaks. 
+ WordList.Free; +end; + +procedure LoadIntegersInCSV(filename: string; var aTokens: TNNetAAInteger; + MaxRows: integer = 0); +var + LargeFile: TextFile; + StrLine: string; + RowCnt, WordCnt: integer; + Separator: TNNetStringList; +begin + Separator := CreateTokenizedStringList(','); + RowCnt := 0; + //WriteLn('Counting rows from: ', filename); + AssignFile(LargeFile, filename); + Reset(LargeFile); + while (not Eof(LargeFile)) and ( (MaxRows=0) or (RowCnt 0 then + begin + for WordCnt := 0 to Separator.Count - 1 do + begin + aTokens[RowCnt][WordCnt] := StrToInt(Separator[WordCnt]); + end; + end; + RowCnt := RowCnt + 1; + end; + CloseFile(LargeFile); +end; + +function RemoveRandomChars(const Str: string; Count: integer): string; +var + i: integer; + StrLen: integer; +begin + Result := Str; + // Calculate the length of the string before removing characters. + StrLen := Length(Result); + if (Count > 0) and (StrLen>1) then + begin + // Loop for the number of characters to be removed. + for i := 1 to Count do + begin + // Check if the string is not empty. + if StrLen > 1 then + begin + // Randomly select a character position and remove one character from that position. + // The '+ 1' is necessary because Pascal strings are 1-indexed, not 0-indexed. 
+ Delete(Result, Random(StrLen) + 1, 1); + Dec(StrLen); + end; + end; + end; +end; + + +function RandomSubstring(const InputString: string): string; +var + SpacePositions: TIntegerList; + I, RandomSpacePos: Integer; + InputStringLen: integer; +begin + InputStringLen := Length(InputString); + if InputStringLen > 0 then + begin + // Create a new integer list instance + SpacePositions := TIntegerList.Create; + // Find the positions of all spaces in the string + for I := 1 to InputStringLen do + begin + if InputString[I] = ' ' then + begin + SpacePositions.Add(I); + end; + end; + + // Append -1 to handle the case with no spaces + SpacePositions.Add(0); + + // Randomly select one of the space positions + RandomSpacePos := SpacePositions[Random(SpacePositions.Count)]; + + // Return the substring starting from the position after the random space + Result := Copy(InputString, RandomSpacePos + 1, InputStringLen - RandomSpacePos); + SpacePositions.Free; + end + else Result := ''; +end; + end. diff --git a/neural/neuralfit.pas b/neural/neuralfit.pas index 7fb44a97..06d6c039 100644 --- a/neural/neuralfit.pas +++ b/neural/neuralfit.pas @@ -35,6 +35,43 @@ interface ; type + TNeuralFitBase = class; + + // This is a base class for all optimizers + TNeuralOptimizer = class(TMObject) + private + FNN: TNNet; + FFit: TNeuralFitBase; + public + procedure SetNN(pNN: TNNet; pFit: TNeuralFitBase); + procedure Optimize(); virtual; abstract; + procedure ForceDeltaLimists(); + end; + + // SGD optimization method + TNeuralOptimizerSGD = class(TNeuralOptimizer) + public + procedure Optimize(); override; + end; + + // Adam optimization method + TNeuralOptimizerAdam = class(TNeuralOptimizer) + FBeta1: TNeuralFloat; + FBeta2: TNeuralFloat; + FEpsilon: TNeuralFloat; + FAdamInitialized: boolean; + public + constructor Create( + Beta1: TNeuralFloat = 0.9; + Beta2: TNeuralFloat = 0.999; + Epsilon: TNeuralFloat = 1e-07); overload; + + // Memory Initializer for Adam optimizer + function InitAdam(Beta1, 
Beta2, Epsilon: TNeuralFloat): TNNetLayer; + + procedure Optimize(); override; + end; + TCustomLearningRateScheduleFn = function(Epoch: integer): single; TCustomLearningRateScheduleObjFn = function(Epoch: integer): single of object; @@ -54,37 +91,39 @@ TNeuralFitBase = class(TMObject) FCurrentEpoch: integer; FCurrentStep: integer; FCurrentTrainingError: TNeuralFloat; + FCustomLearningRateScheduleFn: TCustomLearningRateScheduleFn; + FCustomLearningRateScheduleObjFn: TCustomLearningRateScheduleObjFn; + FDataAugmentation: boolean; + FFinishedThread: TNNetVolume; + {$IFDEF HASTHREADS}FCritSec: TRTLCriticalSection;{$ENDIF} FNN: TNNet; FGlobalHit: integer; FGlobalMiss: integer; FGlobalTotal: integer; FGlobalTotalLoss: single; FGlobalErrorSum: single; - FFinishedThread: TNNetVolume; - {$IFDEF HASTHREADS}FCritSec: TRTLCriticalSection;{$ENDIF} - FMultipleSamplesAtValidation: boolean; - FDataAugmentation: boolean; - FVerbose: boolean; + FInertia: single; FStaircaseEpochs: integer; FStepSize: integer; + FMaxEpochs: integer; + FMultipleSamplesAtValidation: boolean; + FVerbose: boolean; FLearningRateDecay: single; FInitialLearningRate: single; FCyclicalLearningRateLen: integer; FInitialEpoch: integer; - FMaxEpochs: integer; FMinLearnRate: single; FCurrentLearningRate: single; - FInertia: single; FL2Decay: TNeuralFloat; + FLogEveryBatches: integer; FFileNameBase: string; FClipDelta: single; FTargetAccuracy: single; - FCustomLearningRateScheduleFn: TCustomLearningRateScheduleFn; - FCustomLearningRateScheduleObjFn: TCustomLearningRateScheduleObjFn; FOnAfterStep, FOnAfterEpoch, FOnStart: TNotifyEvent; FRunning, FShouldQuit: boolean; FTrainingAccuracy, FValidationAccuracy, FTestAccuracy: TNeuralFloat; FMinBackpropagationError: TNeuralFloat; + FMinBackpropagationErrorProportion: TNeuralFloat; FLoadBestAdEnd: boolean; FTestBestAtEnd: boolean; {$IFDEF OpenCL} @@ -92,7 +131,11 @@ TNeuralFitBase = class(TMObject) FDeviceId: cl_device_id; {$ENDIF} FProcs: TNeuralThreadList; + FOptimizer: 
TNeuralOptimizer; + FOptimizerOwned: boolean; procedure CheckLearningRate(iEpochCount: integer); + procedure Optimize(); + procedure SetOptimizer(pOptimizer: TNeuralOptimizer); protected procedure DoAfterEpoch; virtual; procedure DoAfterStep; virtual; @@ -123,17 +166,19 @@ TNeuralFitBase = class(TMObject) property InitialEpoch: integer read FInitialEpoch write FInitialEpoch; property InitialLearningRate: single read FInitialLearningRate write FInitialLearningRate; property LearningRateDecay: single read FLearningRateDecay write FLearningRateDecay; - property MinLearnRate : single read FMinLearnRate write FMinLearnRate; property LoadBestAtEnd: boolean read FLoadBestAdEnd write FLoadBestAdEnd; + property LogEveryBatches: integer read FLogEveryBatches write FLogEveryBatches; property L2Decay: single read FL2Decay write FL2Decay; property MaxThreadNum: integer read FMaxThreadNum write FMaxThreadNum; property MinBackpropagationError: TNeuralFloat read FMinBackpropagationError write FMinBackpropagationError; + property MinBackpropagationErrorProportion: TNeuralFloat read FMinBackpropagationErrorProportion write FMinBackpropagationErrorProportion; property Momentum: single read FInertia write FInertia; property MultipleSamplesAtValidation: boolean read FMultipleSamplesAtValidation write FMultipleSamplesAtValidation; property NN: TNNet read FNN; property OnAfterStep: TNotifyEvent read FOnAfterStep write FOnAfterStep; property OnAfterEpoch: TNotifyEvent read FOnAfterEpoch write FOnAfterEpoch; property OnStart: TNotifyEvent read FOnStart write FOnStart; + property Optimizer: TNeuralOptimizer write SetOptimizer; property StaircaseEpochs: integer read FStaircaseEpochs write FStaircaseEpochs; property TargetAccuracy: single read FTargetAccuracy write FTargetAccuracy; property TestBestAtEnd: boolean read FTestBestAtEnd write FTestBestAtEnd; @@ -193,6 +238,7 @@ TNeuralDataLoadingFit = class(TNeuralFitWithImageBase) FGetTrainingPair, FGetValidationPair, FGetTestPair: 
TNNetGetPairFn; FGetTrainingProc, FGetValidationProc, FGetTestProc: TNNetGet2VolumesProc; function DefaultLossFn(ExpectedOutput, FoundOutput: TNNetVolume; ThreadId: integer): TNeuralFloat; + function DefaultLossInFirstPixelFn(ExpectedOutput, FoundOutput: TNNetVolume; ThreadId: integer): TNeuralFloat; public constructor Create(); procedure FitLoading(pNN: TNNet; @@ -206,7 +252,10 @@ TNeuralDataLoadingFit = class(TNeuralFitWithImageBase) procedure EnableBipolarHitComparison(); procedure EnableBipolar99HitComparison(); procedure EnableClassComparison(); + procedure EnableClassComparisonInFirstPixel(); procedure EnableDefaultImageTreatment(); override; + procedure EnableDefaultLoss; + procedure EnableDefaultLossInFirstPixel(); // On most cases, you should never call the following methods directly procedure RunNNThread(index, threadnum: integer); @@ -301,6 +350,7 @@ TNeuralImageFit = class(TNeuralFitWithImageBase) function BipolarCompare(A, B: TNNetVolume; ThreadId: integer): boolean; function BipolarCompare99(A, B: TNNetVolume; ThreadId: integer): boolean; function ClassCompare(A, B: TNNetVolume; ThreadId: integer): boolean; + function ClassCompareOnFirstPixel(A, B: TNNetVolume; ThreadId: integer): boolean; implementation uses @@ -368,6 +418,12 @@ function ClassCompare(A, B: TNNetVolume; ThreadId: integer): boolean; Result := (A.GetClass() = B.GetClass()); end; +function ClassCompareOnFirstPixel(A, B: TNNetVolume; ThreadId: integer + ): boolean; +begin + Result := (A.GetClassOnPixel(0, 0) = B.GetClassOnPixel(0, 0)); +end; + {$IFDEF FPC} { TNeuralImageLoadingFit } procedure TNeuralImageLoadingFit.GetTrainingProc(Idx: integer; @@ -478,6 +534,19 @@ procedure TNeuralImageLoadingFit.FitLoading(pNN: TNNet; pSizeX, end; {$ENDIF} +procedure TNeuralFitBase.Optimize(); +begin + if FOptimizer = nil then + begin + FOptimizer := TNeuralOptimizerSGD.Create(); + FOptimizerOwned := true; + end; + FOptimizer.SetNN(FNN, Self); + FOptimizer.Optimize(); + 
//Write(FNN.ForceMaxAbsoluteWeight(2):3:2,' '); + if FL2Decay > 0.0 then FNN.ComputeL2Decay(); +end; + constructor TNeuralFitWithImageBase.Create(); begin inherited Create(); @@ -508,6 +577,37 @@ function TNeuralDataLoadingFit.DefaultLossFn(ExpectedOutput, OutputValue := FoundOutput.FData[ ClassId ]; {$IFDEF Debug} + //if ClassId <> ExpectedOutput.GetClass() then + //begin + // FErrorProc( + // 'Error - classes do not match at TNeuralDataLoadingFit.DefaultLossFn:' + + // IntToStr(ClassId)+','+IntToStr(ExpectedOutput.GetClass()) + // ); + //end; + {$ENDIF} + + if (OutputValue > 0) then + begin + result := -Ln(OutputValue); + end + else + begin + FErrorProc('Error - invalid output value at loss function:' + FloatToStrF(OutputValue,ffFixed,6,4)); + result := 100; + end; +end; + +function TNeuralDataLoadingFit.DefaultLossInFirstPixelFn(ExpectedOutput, + FoundOutput: TNNetVolume; ThreadId: integer): TNeuralFloat; +var + ClassId: integer; + OutputValue: TNeuralFloat; +begin + ClassId := ExpectedOutput.Tag; + OutputValue := FoundOutput[0, 0, ClassId]; + + {$IFDEF Debug} + (* if ClassId <> ExpectedOutput.GetClass() then begin FErrorProc( @@ -515,6 +615,7 @@ function TNeuralDataLoadingFit.DefaultLossFn(ExpectedOutput, IntToStr(ClassId)+','+IntToStr(ExpectedOutput.GetClass()) ); end; + *) {$ENDIF} if (OutputValue > 0) then @@ -524,7 +625,7 @@ function TNeuralDataLoadingFit.DefaultLossFn(ExpectedOutput, else begin FErrorProc('Error - invalid output value at loss function:' + FloatToStrF(OutputValue,ffFixed,6,4)); - result := 1; + result := 100; end; end; @@ -657,7 +758,7 @@ procedure TNeuralDataLoadingFit.FitLoading(pNN: TNNet; TrainingCnt, FTrainingAccuracy := AccuracyWithInertia/100; end; - if ( (FGlobalTotal > 0) and (I mod 10 = 0) ) then + if ( (FGlobalTotal > 0) and (I mod FLogEveryBatches = 0) ) then begin totalTimeSeconds := (Now() - startTime) * 24 * 60 * 60; if FVerbose then MessageProc @@ -803,13 +904,13 @@ procedure TNeuralDataLoadingFit.FitLoading(pNN: TNNet; 
TrainingCnt, AssignFile(CSVFile, FileNameCSV); Append(CSVFile); - MessageProc( - 'Epoch time: ' + FloatToStrF( totalTimeSeconds*(TrainingCnt/(FStepSize*10))/60,ffFixed,1,4)+' minutes.' + - ' '+IntToStr(Epochs)+' epochs: ' + FloatToStrF( Epochs*totalTimeSeconds*(TrainingCnt/(FStepSize*10))/3600,ffFixed,1,4)+' hours.'); + MessageProc( + 'Epoch time: ' + FloatToStrF( totalTimeSeconds*(TrainingCnt/(FStepSize*FLogEveryBatches))/60,ffFixed,1,4)+' minutes.' + + ' '+IntToStr(Epochs)+' epochs: ' + FloatToStrF( Epochs*totalTimeSeconds*(TrainingCnt/(FStepSize*FLogEveryBatches))/3600,ffFixed,1,4)+' hours.'); - MessageProc( - 'Epochs: '+IntToStr(FCurrentEpoch)+ - '. Working time: '+FloatToStrF(Round((Now() - globalStartTime)*2400)/100,ffFixed,4,2)+' hours.'); + MessageProc( + 'Epochs: '+IntToStr(FCurrentEpoch)+ + '. Working time: '+FloatToStrF(Round((Now() - globalStartTime)*2400)/100,ffFixed,4,2)+' hours.'); DoAfterEpoch; end; @@ -1062,8 +1163,14 @@ procedure TNeuralDataLoadingFit.RunNNThread(index, threadnum: integer); LocalErrorSum := LocalErrorSum + CurrentError; if (CurrentError > FMinBackpropagationError) or - (CurrentError > FCurrentTrainingError/4) - then LocalNN.Backpropagate( vOutput ); + ( + (FCurrentTrainingError>0) and + (CurrentError > FCurrentTrainingError*FMinBackpropagationErrorProportion) + ) + then + begin + LocalNN.Backpropagate( vOutput ); + end; CurrentLoss := 0; if Assigned(FLossFn) then @@ -1357,8 +1464,6 @@ procedure TNeuralDataLoadingFit.FreeMemory(); end; procedure TNeuralDataLoadingFit.RunTrainingBatch(); -var - MaxDelta: TNeuralFloat; begin FGlobalHit := 0; FGlobalMiss := 0; @@ -1373,22 +1478,7 @@ procedure TNeuralDataLoadingFit.RunTrainingBatch(); {$ELSE} RunNNThread(0, 1); {$ENDIF} - if FClipDelta > 0 then - begin - FNN.ForceMaxAbsoluteDelta(FClipDelta); - end - else - begin - MaxDelta := FNN.NormalizeMaxAbsoluteDelta(); - if MaxDelta < 1 then - begin - MessageProc('Deltas have been multiplied by: '+FloatToStr(MaxDelta)+'.'+ - ' Max delta on 
layer: '+IntToStr(FNN.MaxDeltaLayer)+' - '+ - FNN.Layers[FNN.MaxDeltaLayer].ClassName+'.'); - end; - end; - FNN.UpdateWeights(); - if FL2Decay > 0.0 then FNN.ComputeL2Decay(); + Optimize(); end; procedure TNeuralDataLoadingFit.RunValidationBatch(ValidationSize: integer); @@ -1449,15 +1539,105 @@ procedure TNeuralDataLoadingFit.EnableClassComparison(); FInferHitFn := {$IFDEF FPC}@{$ENDIF}ClassCompare; end; +procedure TNeuralDataLoadingFit.EnableClassComparisonInFirstPixel(); +begin + FInferHitFn := {$IFDEF FPC}@{$ENDIF}ClassCompareOnFirstPixel; +end; + +procedure TNeuralDataLoadingFit.EnableDefaultLoss(); +begin + FLossFn := {$IFDEF FPC}@{$ENDIF}DefaultLossFn; +end; + +procedure TNeuralDataLoadingFit.EnableDefaultLossInFirstPixel(); +begin + FLossFn := {$IFDEF FPC}@{$ENDIF}DefaultLossInFirstPixelFn; +end; + procedure TNeuralDataLoadingFit.EnableDefaultImageTreatment(); begin inherited EnableDefaultImageTreatment(); EnableClassComparison(); - FLossFn := {$IFDEF FPC}@{$ENDIF}DefaultLossFn; + EnableDefaultLoss(); end; -{ TNeuralFitBase } +{ TNeuralOptimizer } + +procedure TNeuralOptimizer.SetNN(pNN: TNNet; pFit: TNeuralFitBase); +begin + FNN := pNN; + FFit := pFit; + FMessageProc := pFit.MessageProc; + FErrorProc := pFit.ErrorProc; +end; + +procedure TNeuralOptimizer.ForceDeltaLimists(); +var + MaxDelta: TNeuralFloat; +begin + if FFit.FClipDelta > 0 then + begin + {MaxDelta := }FNN.ForceMaxAbsoluteDelta(FFit.FClipDelta); + end + else + begin + //FNN.NormalizeMaxAbsoluteDeltaPerNeuron(FCurrentLearningRate); + //MaxDelta := FNN.NormalizeMinMaxAbsoluteDeltaPerLayer(FCurrentLearningRate, FCurrentLearningRate); + MaxDelta := FNN.NormalizeMaxAbsoluteDelta(); + if MaxDelta < 1 then + begin + MessageProc('Deltas have been multiplied by: '+FloatToStr(MaxDelta)+'.'+ + ' Max delta on layer: '+IntToStr(FNN.MaxDeltaLayer)+' - '+ + FNN.Layers[FNN.MaxDeltaLayer].ClassName+'.'); + end; + end; +end; +{ TNeuralOptimizerSGD } + +procedure TNeuralOptimizerSGD.Optimize(); +begin + 
ForceDeltaLimists(); + FNN.UpdateWeights(); +end; + +{ TNeuralOptimizerAdam } + +constructor TNeuralOptimizerAdam.Create(Beta1: TNeuralFloat; + Beta2: TNeuralFloat; Epsilon: TNeuralFloat); +begin + inherited Create(); + FBeta1 := Beta1; + FBeta2 := Beta2; + FEpsilon := Epsilon; + FAdamInitialized := false; +end; + +function TNeuralOptimizerAdam.InitAdam(Beta1, Beta2, Epsilon: TNeuralFloat + ): TNNetLayer; +var + LayerCnt: integer; +begin + for LayerCnt := 0 to FNN.GetLastLayerIdx() do + begin + FNN.Layers[LayerCnt].InitAdam(Beta1, Beta2, Epsilon); + end; + Result := FNN.GetLastLayer(); +end; + +procedure TNeuralOptimizerAdam.Optimize(); +begin + if not(FAdamInitialized) then + begin + InitAdam(FBeta1, FBeta2, FEpsilon); + FAdamInitialized := true; + end; + FNN.CalcAdamDelta(); + ForceDeltaLimists(); + FNN.UpdateWeightsAdam(); +end; + +{ TNeuralFitBase } constructor TNeuralFitBase.Create(); begin inherited Create(); @@ -1489,6 +1669,7 @@ constructor TNeuralFitBase.Create(); FCyclicalLearningRateLen := 0; // not cyclical by default. 
FInitialEpoch := 0; FMinBackpropagationError := 0; + FMinBackpropagationErrorProportion := 0.25; fMinLearnRate := FInitialLearningRate * 0.01; FInertia := 0.9; FClipDelta := 0.0; @@ -1509,10 +1690,14 @@ constructor TNeuralFitBase.Create(); FCurrentStep := 0; FLoadBestAdEnd := True; FTestBestAtEnd := True; + FLogEveryBatches := 10; + FOptimizer := nil; + FOptimizerOwned := false; end; destructor TNeuralFitBase.Destroy(); begin + if FOptimizerOwned and Assigned(FOptimizer) then FOptimizer.Free; {$IFDEF HASTHREADS} NeuralDoneCriticalSection(FCritSec); {$ENDIF} @@ -1613,6 +1798,14 @@ procedure TNeuralFitBase.CheckLearningRate(iEpochCount: integer); end; end; +procedure TNeuralFitBase.SetOptimizer(pOptimizer: TNeuralOptimizer); +begin + if FOptimizerOwned and Assigned(FOptimizer) then FOptimizer.Free; + FOptimizer := pOptimizer; + FOptimizerOwned := false; + FOptimizer.SetNN(FNN, Self); +end; + { TNeuralImageFit } constructor TNeuralImageFit.Create(); @@ -1628,6 +1821,7 @@ constructor TNeuralImageFit.Create(); FIsSoftmax := true; FMaxCropSize := 8; FMinBackpropagationError := 0.2; + FMinBackpropagationErrorProportion := 0.25; FMultipleSamplesAtValidation := true; FTrainingSampleProcessedCnt := TNNetVolume.Create; end; @@ -1690,7 +1884,6 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; LocalHasValidation: boolean; CSVFile: TextFile; CurrentAccuracy, AccuracyWithInertia: TNeuralFloat; - MaxDelta: TNeuralFloat; ValidationRecord: TNeuralFloat; begin FRunning := true; @@ -1774,7 +1967,9 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; ' Batch size:' + IntToStr(FBatchSize) + ' Step size:' + IntToStr(FStepSize) + ' Staircase epochs:' + IntToStr(FStaircaseEpochs) + - ' Min backprop error:' + FloatToStrF(MinBackpropagationError,ffFixed,4,2) + ' Min backprop error and proportion:' + + FloatToStrF(FMinBackpropagationError,ffFixed,4,2)+' '+ + FloatToStrF(FMinBackpropagationErrorProportion,ffFixed,4,2) ); if Assigned(FImgVolumes) then MessageProc('Training images: 
'+IntToStr(FImgVolumes.Count)); if Assigned(FImgValidationVolumes) then MessageProc('Validation images: '+IntToStr(FImgValidationVolumes.Count)); @@ -1818,24 +2013,7 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; {$ELSE} RunNNThread(0, 1); {$ENDIF} - if FClipDelta > 0 then - begin - FNN.ForceMaxAbsoluteDelta(FClipDelta); - end - else - begin - MaxDelta := FNN.NormalizeMaxAbsoluteDelta(); - if MaxDelta < 1 then - begin - MessageProc('Deltas have been multiplied by: '+FloatToStr(MaxDelta)+'.'+ - ' Max delta on layer: '+IntToStr(FNN.MaxDeltaLayer)+' - '+ - FNN.Layers[FNN.MaxDeltaLayer].ClassName+'.'); - end; - end; - FNN.UpdateWeights(); - //Write(FNN.ForceMaxAbsoluteWeight(2):3:2,' '); - if FL2Decay > 0.0 then FNN.ComputeL2Decay(); - + Optimize(); FGlobalTotal := (FGlobalHit + FGlobalMiss); if (FGlobalTotal > 0) then begin @@ -1861,7 +2039,7 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; FTrainingAccuracy := AccuracyWithInertia/100; end; - if ( (FGlobalTotal > 0) and (I mod 10 = 0) ) then + if ( (FGlobalTotal > 0) and (I mod FLogEveryBatches = 0) ) then begin totalTimeSeconds := (Now() - startTime) * 24 * 60 * 60; if FVerbose then MessageProc @@ -2027,8 +2205,8 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; Append(CSVFile); MessageProc( - 'Epoch time: ' + FloatToStrF( totalTimeSeconds*(pImgVolumes.Count/(FStepSize*10))/60,ffFixed,1,4)+' minutes.' + - ' '+IntToStr(Epochs)+' epochs: ' + FloatToStrF( Epochs*totalTimeSeconds*(pImgVolumes.Count/(FStepSize*10))/3600,ffFixed,1,4)+' hours.'); + 'Epoch time: ' + FloatToStrF( totalTimeSeconds*(pImgVolumes.Count/(FStepSize*FLogEveryBatches))/60,ffFixed,1,4)+' minutes.' 
+ + ' '+IntToStr(Epochs)+' epochs: ' + FloatToStrF( Epochs*totalTimeSeconds*(pImgVolumes.Count/(FStepSize*FLogEveryBatches))/3600,ffFixed,1,4)+' hours.'); MessageProc( 'Epochs: '+IntToStr(FCurrentEpoch)+ @@ -2199,8 +2377,12 @@ procedure TNeuralImageFit.RunNNThread(index, threadnum: integer); OutputValue := Max(OutputValue, 0.001); end; - if (CurrentError>FMinBackpropagationError) or - (CurrentError>FCurrentTrainingError/4) then + if + (CurrentError > FMinBackpropagationError) or + ( + (FCurrentTrainingError>0) and + (CurrentError > FCurrentTrainingError*FMinBackpropagationErrorProportion) + ) then begin LocalNN.Backpropagate(vOutput); end @@ -2218,7 +2400,7 @@ procedure TNeuralImageFit.RunNNThread(index, threadnum: integer); else begin FErrorProc('Error - invalid output value at loss function:' + FloatToStrF(OutputValue,ffFixed,6,4)); - CurrentLoss := 1; + CurrentLoss := 100; end; LocalTotalLoss := LocalTotalLoss + CurrentLoss; @@ -2391,7 +2573,7 @@ procedure TNeuralImageFit.TestNNThread(index, threadnum: integer); 'Error - invalid output value at loss function:' + FloatToStrF(OutputValue,ffFixed,6,4) ); - CurrentLoss := 1; + CurrentLoss := 100; end; LocalTotalLoss := LocalTotalLoss + CurrentLoss; @@ -2520,6 +2702,7 @@ procedure TNeuralFitWithImageBase.EnableDefaultImageTreatment(); FHasMakeGray := True; FMaxCropSize := 8; FMinBackpropagationError := 0.2; + FMinBackpropagationErrorProportion := 0.25; FMultipleSamplesAtValidation := True; end; diff --git a/neural/neuralnetwork.pas b/neural/neuralnetwork.pas index 9b18de7c..78da221b 100644 --- a/neural/neuralnetwork.pas +++ b/neural/neuralnetwork.pas @@ -1,6 +1,6 @@ (* neuralnetwork -Copyright (C) 2017 Joao Paulo Schwarz Schuler +Copyright (C) 2023 Joao Paulo Schwarz Schuler This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by @@ -21,43 +21,7 @@ (* // coded, adapted and ported by Joao Paulo Schwarz Schuler -// 
https://sourceforge.net/p/cai/ ----------------------------------------------- -You can find simple to understand examples at: -https://sourceforge.net/p/cai/svncode/HEAD/tree/trunk/lazarus/experiments/supersimple/ -https://sourceforge.net/p/cai/svncode/HEAD/tree/trunk/lazarus/experiments/supersimplecorrelation/ ----------------------------------------------- -There are CIFAR-10 examples at: -https://sourceforge.net/p/cai/svncode/HEAD/tree/trunk/lazarus/testcnnalgo/testcnnalgo.lpr -https://sourceforge.net/p/cai/svncode/HEAD/tree/trunk/lazarus/experiments/visualCifar10BatchUpdate/ -https://sourceforge.net/p/cai/svncode/HEAD/tree/trunk/lazarus/experiments/visualCifar10OpenCL/ ----------------------------------------------- -Example - How to Create Your Network -NumClasses := 10; -NN := TNNet.Create(); -NN.AddLayer( TNNetInput.Create(32,32,3) ); -NN.AddLayer( TNNetConvolutionReLU.Create( 16,5,0,0) ); -NN.AddLayer( TNNetMaxPool.Create(2) ); -NN.AddLayer( TNNetConvolutionReLU.Create(128,5,0,0) ); -NN.AddLayer( TNNetMaxPool.Create(2) ); -NN.AddLayer( TNNetConvolutionReLU.Create(128,5,0,0) ); -NN.AddLayer( TNNetLayerFullConnectReLU.Create(64) ); -NN.AddLayer( TNNetLayerFullConnect.Create(NumClasses) ); -NN.SetLearningRate(0.01,0.8); ----------------------------------------------- -Example - How to create a simple fully forward connected network 3x3 -NN := TNNet.Create(); -NN.AddLayer( TNNetInput.Create(3) ); -NN.AddLayer( TNNetLayerFullConnectReLU.Create(3) ); -NN.AddLayer( TNNetLayerFullConnectReLU.Create(3) ); -NN.SetLearningRate(0.01,0.8); ----------------------------------------------- -Example - How to Train Your Network -// InputVolume and vDesiredVolume are of the type TNNetVolume -NN.Compute(InputVolume); -NN.GetOutput(PredictedVolume); -vDesiredVolume.SetClassForReLU(DesiredClass); -NN.Backpropagate(vDesiredVolume); +// https://github.com/joaopauloschuler/neural-api/ ---------------------------------------------- Interesting links: 
http://cs.stanford.edu/people/karpathy/convnetjs/demo/cifar10.html @@ -89,18 +53,26 @@ interface const csMaxInterleavedSize: integer = 95; + csNNetMaxParameterIdx = 7; type + TNNet = class; + TNNetLayer = class; + { TNNetNeuron } TNNetNeuron = class (TMObject) protected FWeights: TNNetVolume; FBackInertia: TNNetVolume; + FBackInertia2: TNNetVolume; FDelta: TNNetVolume; + FDelta2: TNNetVolume; + FParentLayer: TNNetLayer; private FBiasWeight: TNeuralFloat; FBiasInertia: TNeuralFloat; + FBiasInertia2: TNeuralFloat; FBiasDelta: TNeuralFloat; public constructor Create(); @@ -109,6 +81,9 @@ TNNetNeuron = class (TMObject) procedure Fill(Value:TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} procedure AddInertia(); {$IFDEF Release} inline; {$ENDIF} procedure UpdateWeights(Inertia:TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} + procedure UpdateWeightsWithoutInertia(); {$IFDEF Release} inline; {$ENDIF} + procedure CalcAdamDelta(); + procedure UpdateWeightsAdam(); {$IFDEF Release} inline; {$ENDIF} function SaveToString(): string; procedure LoadFromString(strData: string); procedure ClearDelta; {$IFDEF Release} inline; {$ENDIF} @@ -136,6 +111,8 @@ TNNetNeuron = class (TMObject) procedure InitHeGaussianDepthwise(Value: TNeuralFloat = 1); // Weight Initializer for SELU activation function. 
procedure InitSELU(Value: TNeuralFloat = 1); + // Memory Initializer for Adam Optimizer + procedure InitAdam(ParentLayer: TNNetLayer); property Weights: TNNetVolume read FWeights; property Bias: TNeuralFloat read FBiasWeight; @@ -165,11 +142,6 @@ TNNetNeuronList = class (TNNetList) procedure InitForDebug(); end; - const - csNNetMaxParameterIdx = 7; - - type - TNNet = class; /// neural network layer TNNetLayer = class(TMObject) protected @@ -187,12 +159,17 @@ TNNetLayer = class(TMObject) FSuppressBias: integer; // Fast access to TNNetNeuron FArrNeurons: array of TNNetNeuron; - FInertia: TNeuralFloat; FPrevLayer: TNNetLayer; FLearningRate: TNeuralFloat; FL2Decay: TNeuralFloat; + // Adam settings + FBeta1, FBeta2, FEpsilon: TNeuralFloat; + FBeta1Decay, FBeta2Decay: TNeuralFloat; + FOneMinusBeta1Decay, FOneMinusBeta2Decay: TNeuralFloat; + FStruct: array [0..csNNetMaxParameterIdx] of integer; + FFloatSt: array [0..csNNetMaxParameterIdx] of TNeuralFloat; //backpropagation properties FDepartingBranchesCnt: integer; @@ -261,6 +238,7 @@ TNNetLayer = class(TMObject) function ForceMaxAbsoluteDelta(vMax: TNeuralFloat): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} function ForceMaxAbsoluteWeight(vMax: TNeuralFloat): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} function GetMaxAbsoluteDelta(): TNeuralFloat; virtual; + procedure NormalizeMaxAbsoluteDeltaPerNeuron(MaxDelta: TNeuralFloat); procedure GetMinMaxAtDepth(pDepth: integer; var pMin, pMax: TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} // Returns the sum of all weights from all neurons in the layer. 
function GetWeightSum(): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} @@ -301,6 +279,8 @@ TNNetLayer = class(TMObject) function SaveStructureToString(): string; virtual; procedure SetBatchUpdate(pBatchUpdate: boolean); {$IFDEF Release} inline; {$ENDIF} procedure UpdateWeights(); {$IFDEF Release} inline; {$ENDIF} + procedure CalcAdamDelta(); {$IFDEF Release} inline; {$ENDIF} + procedure UpdateWeightsAdam(); {$IFDEF Release} inline; {$ENDIF} function InitBasicPatterns(): TNNetLayer; // Increments an internal counter that counts how many branches load @@ -338,6 +318,9 @@ TNNetLayer = class(TMObject) function InitGlorotBengioUniform(Value: TNeuralFloat = 1): TNNetLayer; // Weight Initializer for SELU activation function. function InitSELU(Value: TNeuralFloat = 1): TNNetLayer; + // Memory Initializer for Adam optimizer + function InitAdam(Beta1, Beta2, Epsilon: TNeuralFloat): TNNetLayer; + procedure InitDefault(); virtual; property ActivationFn: TNeuralActivationFunction read FActivationFn write FActivationFn; @@ -357,6 +340,11 @@ TNNetLayer = class(TMObject) property BackwardTime: double read FBackwardTime write FBackwardTime; property ForwardTime: double read FForwardTime write FForwardTime; property LinkedNeurons: boolean read FLinkedNeurons; + {$IFDEF OpenCL} + property HasOpenCL: boolean read FHasOpenCL; + property ShouldOpenCL:boolean read FShouldOpenCL; + {$ENDIF} + end; TNNetLayerClass = class of TNNetLayer; @@ -416,6 +404,24 @@ TNNetInput = class(TNNetInputBase) function DisableErrorCollection: TNNetInput; end; + // This layer transposes the X and Depth axis. + TNNetTransposeXD = class(TNNetLayer) + private + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + procedure Compute(); override; + procedure Backpropagate(); override; + end; + + // This layer transposes the Y and Depth axis. 
+ TNNetTransposeYD = class(TNNetLayer) + private + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + procedure Compute(); override; + procedure Backpropagate(); override; + end; + /// This layer copies the input to the output and can be used as a base class // to your new layers. TNNetIdentity = class(TNNetLayer) @@ -442,11 +448,42 @@ TNNetPad = class(TNNetLayer) FPadding: integer; procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; public + constructor Create; overload; override; constructor Create(Padding: integer); reintroduce; overload; procedure Compute(); override; procedure Backpropagate(); override; end; + /// Padding layer: adds padding to the input. + // This layer is similar to TNNetPad except that it allows you to add distinct + // paddings to X and Y. + // This layer has no trainable parameter. Adding a padding layer may be + // more efficient than padding at the convolutional layer. + TNNetPadXY = class(TNNetLayer) + private + FPaddingX, FPaddingY: integer; + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + constructor Create; overload; override; + constructor Create(PaddingX, PaddingY: integer); reintroduce; overload; + procedure Compute(); override; + procedure Backpropagate(); override; + end; + + { TNNetCrop } + + TNNetCrop = class(TNNetLayer) + private + FStartX, FStartY: integer; + FLenX, FLenY: integer; + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + constructor Create; overload; override; + constructor Create(StartX, StartY, LenX, LenY: integer); reintroduce; overload; + procedure Compute(); override; + procedure Backpropagate(); override; + end; + /// Base class to be used with layers that aren't compatible with L2 TNNetIdentityWithoutL2 = class(TNNetIdentity) private @@ -489,6 +526,13 @@ TNNetReLU = class(TNNetReLUBase) procedure Compute(); override; end; + /// This is almost the same as ReLU except that it doesn't + // backpropagate on zero values (Positive only) + 
TNNetReLUP = class(TNNetReLUBase) + public + procedure Compute(); override; + end; + /// This is a leaky ReLU with minimum and maximum values. You can // scale leakiness via the Leaky parameter. TNNetReLUL = class(TNNetReLUBase) @@ -593,7 +637,7 @@ TNNetHyperbolicTangent = class(TNNetSigmoid) // learning but can also provoke overflows. TNNetMulLearning = class(TNNetIdentity) public - constructor Create(pMul: integer); reintroduce; overload; + constructor Create(pMul: TNeuralFloat); reintroduce; overload; procedure Backpropagate(); override; end; @@ -617,6 +661,61 @@ TNNetAddAndDiv = class(TNNetIdentity) procedure Compute(); override; end; + { TNNetAddPositionalEmbedding } + // Adds positional embedding as per paper "Attention Is All You Need". + // https://arxiv.org/abs/1706.03762 . + TNNetAddPositionalEmbedding = class(TNNetIdentity) + private + FPositionalEmbedding: TNNetVolume; + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + constructor Create; overload; override; + constructor Create(n: integer); reintroduce; overload; + destructor Destroy(); override; + + procedure Compute(); override; + end; + + { TNNetEmbedding } + // Do not use this layer. It's under construction. + TNNetEmbedding = class(TNNetLayer) + private + FVocabSize: integer; + FEmbeddingSize: integer; + FScaleEmbedding: TNeuralFloat; + FEncodeZero: boolean; + FInputTokens: array of integer; + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + constructor Create; overload; override; + constructor Create(pVocabSize, pEmbeddingSize: integer; + EncodeZero: integer = 0; ScaleEmbedding: TNeuralFloat = 2); reintroduce; overload; + destructor Destroy; override; + + procedure InitDefault(); override; + procedure Compute(); override; + procedure Backpropagate(); override; + end; + + { TNNetTokenAndPositionalEmbedding } + // Do not use this layer. It's under construction. 
+ TNNetTokenAndPositionalEmbedding = class(TNNetEmbedding) + private + FPositionalEmbedding: TNNetVolume; + FPositionalEmbeddingN: integer; + FScalePositional: TNeuralFloat; + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + constructor Create(pVocabSize, pEmbeddingSize: integer; + EncodeZero: integer = 0; + ScaleEmbedding: TNeuralFloat = 2; + ScalePositional: TNeuralFloat = 1; + PositionalEmbeddingN: integer = 0); + destructor Destroy; override; + + procedure Compute(); override; + end; + TNNetAddNoiseBase = class(TNNetIdentity) protected FEnabled: boolean; @@ -699,8 +798,21 @@ TNNetMovingStdNormalization = class(TNNetIdentityWithoutL2) function GetMaxAbsoluteDelta(): TNeuralFloat; override; end; + // This layer is experimental. Do not use. + TNNetMovingScale = class(TNNetIdentityWithoutL2) + private + FChangeRate: TNeuralFloat; + FMaxTarget: TNeuralFloat; + public + constructor Create; overload; override; + constructor Create(pMaxTarget: TNeuralFloat; pChangeRate: TNeuralFloat); reintroduce; overload; + procedure Compute(); override; + procedure Backpropagate(); override; + procedure InitDefault(); override; + end; + // This is an experimental layer. Do not use it. - TNNetScaleLearning = class(TNNetMovingStdNormalization) + TNNetScaleLearning = class(TNNetIdentity) public procedure Compute(); override; procedure Backpropagate(); override; @@ -909,6 +1021,27 @@ TNNetSum = class(TNNetConcatBase) procedure Backpropagate(); override; end; + /// This layer is under construction. DO NOT USE IT. + // This layer run the TNNetVolume.DotProducts for layers A and B. 
+ TNNetDotProducts = class(TNNetLayer) + private + FA: TNNetLayer; // Layer A + FB: TNNetLayer; // Layer B + FAError: TNNetVolume; // Layer A: Error + FBError: TNNetVolume; // Layer B: Error + FAT: TNNetVolume; // Layer A: Output Transposed + FBT: TNNetVolume; // Layer B: Output Transposed + FET: TNNetVolume; // Error Transposed + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + constructor Create(A, B: TNNetLayer); reintroduce; overload; + constructor Create(AIdx, BIdx: integer); reintroduce; overload; + destructor Destroy(); override; + + procedure Compute(); override; + procedure Backpropagate(); override; + end; + /// picks/splits from previous layer selected channels. TNNetSplitChannels = class(TNNetLayer) private @@ -957,8 +1090,7 @@ TNNetFullConnect = class(TNNetLayerConcatedWeights) //FullyConnectedLayers TNNetFullConnectClass = class of TNNetFullConnect; - /// Fully connected layer without activation function. This layer is useful - // before softmax layers. + /// Fully connected layer without activation function. TNNetFullConnectLinear = class(TNNetFullConnect) private procedure ComputePreviousLayerErrorCPU(); override; @@ -977,6 +1109,7 @@ TNNetFullConnectSigmoid = class(TNNetFullConnect) end; /// Fully connected layer with ReLU. + // This layer is useful before softmax layers. TNNetFullConnectReLU = class(TNNetFullConnectLinear) private procedure ComputePreviousLayerErrorCPU(); override; @@ -999,8 +1132,22 @@ TNNetFullConnectDiff = class(TNNetFullConnectReLU) procedure Backpropagate(); override; end; + // Pointwise softmax operation. + TNNetPointwiseSoftMax = class(TNNetIdentity) + protected + FSkipBackpropDerivative: boolean; + public + // Although skipping the derivative calculation is a non standard usage, + // skipping the derivative can give higher classification accuracy at + // image classification tasks with 10x smaller learning rate. 
+ constructor Create; overload; override; + constructor Create(SkipBackpropDerivative: integer); reintroduce; overload; + procedure Compute(); override; + procedure Backpropagate(); override; + end; + /// Common softmax layer. - TNNetSoftMax = class(TNNetIdentity) + TNNetSoftMax = class(TNNetPointwiseSoftMax) protected FSoftTotalSum: TNeuralFloat; public @@ -1106,8 +1253,13 @@ TNNetConvolutionClass = class of TNNetConvolutionBase; /// This layer is under construction. DO NOT USE IT. TNNetGroupedConvolutionLinear = class(TNNetConvolutionBase) private - FArrGroupId: array of integer; - FArrGroupIdStart: array of integer; + FArrPrevLayerGroupId: array of integer; + FArrPrevLayerGroupIdStart: array of integer; + FOutputGroupId: array of integer; + FOutputGroupIdStart: array of integer; + FGroupIdToPrevLayerIdStart: array of integer; + FGroupIdToOutputIdStart: array of integer; + FMaxPrevX, FMaxPrevY: integer; procedure PrepareInputForGroupedConvolutionFast(); procedure ComputeCPU(); @@ -1318,12 +1470,31 @@ TNNetPoolBase = class(TNNetLayer) /// DEFAULT CAI maxpool layer. TNNetMaxPool = class(TNNetPoolBase) private - procedure ComputeDefaultStride(); - procedure ComputeWithStride(); + procedure ComputeDefaultStride(); virtual; + procedure ComputeWithStride(); virtual; public procedure Compute(); override; end; + // This layer implements a maxpool that also stores the position + // of the maximum values. 
+ TNNetMaxPoolWithPosition = class(TNNetMaxPool) + private + FLogPosX, FLogPosY: boolean; + FExtraSize: integer; + FPosX, FPosY: array of TNeuralFloat; + FMaxBackpropX: integer; + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + procedure ComputeDefaultStride(); override; + procedure ComputeWithStride(); override; + procedure ComputePositions(); + public + constructor Create(pPoolSize: integer; pStride: integer; pPadding: integer; + pLogPosX: integer; pLogPosY: integer; pMaxBackpropX: integer = 0); + destructor Destroy(); override; + procedure Backpropagate(); override; + end; + /// PORTABLE maxpool layer (similar to other APIs) TNNetMaxPoolPortable = class(TNNetMaxPool) private @@ -1445,6 +1616,8 @@ TNNet = class(TMObject) Groups, pNumFeatures, pFeatureSize, pInputPadding, pStride: integer; pSuppressBias: integer = 0; ChannelInterleaving: boolean = True): TNNetLayer; + function AddGroupedDotProducts(A,B: TNNetLayer; Groups: integer; ChannelInterleaving: boolean): TNNetLayer; + function AddGroupedPointwiseSoftMax(Groups: integer; ChannelInterleaving: boolean): TNNetLayer; /// AddAutoGroupedPointwiseConv implements // pointwise convolutions of the kEffNet architecture // described on the paper: "Grouped Pointwise Convolutions Significantly @@ -1503,8 +1676,16 @@ TNNet = class(TMObject) function AddAvgMaxPool(pPoolSize: integer; pMaxPoolDropout: TNeuralFloat = 0; pKeepDepth:boolean = false; pAfterLayer: TNNetLayer = nil): TNNetLayer; function AddMinMaxChannel(pAfterLayer: TNNetLayer = nil): TNNetLayer; function AddAvgMaxChannel(pMaxPoolDropout: TNeuralFloat = 0; pKeepDepth:boolean = false; pAfterLayer: TNNetLayer = nil): TNNetLayer; + // Transformers, AddSingleHeadSelfAttention and AddSingleHeadTransformerBlock are under construction - do not use it + procedure AddSingleHeadSelfAttention(out Attended, W: TNNetLayer); + function AddSelfAttention(Heads: integer): TNNetLayer; + function AddSelfAttentionCAI(Heads: integer): TNNetLayer; + procedure 
AddSingleHeadTransformerBlock(out Result, W: TNNetLayer; HasNorm: boolean = False); + function AddTransformerBlock(Heads: integer; IntermediateDim: integer; HasNorm: boolean = False): TNNetLayer; + function AddTransformerBlockCAI(Heads: integer; IntermediateDim: integer; HasNorm: boolean = False): TNNetLayer; procedure AddToExponentialWeightAverage(NewElement: TNNet; Decay: TNeuralFloat); procedure AddToWeightAverage(NewElement: TNNet; CurrentElementCount: integer); + function GetFirstLayer(): TNNetLayer; // Returns the layer index of the first neuronal layer (layers that have neurons). function GetFirstNeuronalLayerIdx(FromLayerIdx:integer = 0): integer; {$IFDEF Release} inline; {$ENDIF} // Returns the layer index of the first neuronal layer that can process an image as input. @@ -1556,6 +1737,8 @@ TNNet = class(TMObject) procedure SetBatchUpdate(pBatchUpdate: boolean); {$IFDEF Release} inline; {$ENDIF} procedure InitWeights(); procedure UpdateWeights(); {$IFDEF Release} inline; {$ENDIF} + procedure CalcAdamDelta(); + procedure UpdateWeightsAdam(); {$IFDEF Release} inline; {$ENDIF} procedure ClearDeltas(); {$IFDEF Release} inline; {$ENDIF} procedure ResetBackpropCallCurrCnt(); {$IFDEF Release} inline; {$ENDIF} procedure SetL2Decay(pL2Decay: TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} @@ -1579,6 +1762,9 @@ TNNet = class(TMObject) function ForceMaxAbsoluteWeight(vMax: TNeuralFloat): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} function GetMaxAbsoluteDelta(): TNeuralFloat; function NormalizeMaxAbsoluteDelta(NewMax: TNeuralFloat = 0.1): TNeuralFloat; + function NormalizeMinAbsoluteDeltaPerLayer(MinDelta: TNeuralFloat = 0.001): TNeuralFloat; + function NormalizeMinMaxAbsoluteDeltaPerLayer(MinDelta, MaxDelta: TNeuralFloat): TNeuralFloat; + procedure NormalizeMaxAbsoluteDeltaPerNeuron(MaxDelta: TNeuralFloat); procedure ClearInertia(); {$IFDEF Release} inline; {$ENDIF} procedure ClearBias(); {$IFDEF Release} inline; {$ENDIF} @@ -1878,6 +2064,14 @@ 
TEasyBytePredictionViaNNet = class(TBytePredictionViaNNet) Threshold: TNeuralFloat = 0.5 ); + // Simple character based NLP function for building a string from characters. + function GenerateStringFromChars(NN: TNNet; InputString: string; oSampler: TNNetSamplerBase = nil): string; overload; + + // Takes a neural network (NN) and an input string, and returns the predicted class as an integer. + function GetClassFromChars(NN: TNNet; InputString: string): integer; + + function GenerateStringFromTokens(NN: TNNet; Dict:TStringListInt; InputString: string; oSampler: TNNetSamplerBase = nil): string; + implementation procedure RebuildPatternOnPreviousPatterns @@ -1990,126 +2184,868 @@ procedure RebuildNeuronListOnPreviousPatterns end; end; -{ TNNetGroupedPointwiseConvHardSwish } -constructor TNNetGroupedPointwiseConvHardSwish.Create(pNumFeatures, - pGroups: integer; pSuppressBias: integer); +function GenerateStringFromChars(NN: TNNet; InputString: string; + oSampler: TNNetSamplerBase): string; +var + InputVolume, OutputVolume: TNNetVolume; + NextTokenInt: integer; + NextTokenChar: char; + AB: array [0..0] of byte; +begin + InputVolume := TNNetVolume.Create(NN.GetFirstLayer.Output); + OutputVolume := TNNetVolume.Create(NN.GetLastLayer().Output); + repeat + InputVolume.OneHotEncodingReversed(InputString); + NN.Compute(InputVolume, OutputVolume); + if (OutputVolume.Size = 8) then + begin + OutputVolume.ReadAsBits(AB, 0.5); + NextTokenInt := AB[0]; + end + else + begin + if Assigned(oSampler) + then NextTokenInt := oSampler.GetToken(OutputVolume) + else NextTokenInt := OutputVolume.GetClass(); + end; + NextTokenChar := Char(NextTokenInt); + if NextTokenInt > 1 then InputString := InputString + NextTokenChar; + until (NextTokenInt < 2) or (Length(InputString)>=InputVolume.SizeX); + Result := InputString; + InputVolume.Free; + OutputVolume.Free; +end; + +// Takes a neural network (NN) and an input string, +// and returns the predicted class as an integer. 
+function GetClassFromChars(NN: TNNet; InputString: string): integer; +var + InputVolume: TNNetVolume; // Declare a variable for the input volume. begin - inherited Create(pNumFeatures, pGroups, pSuppressBias); - FActivationFn := @HardSwish; - FActivationFnDerivative := @HardSwishDerivative; + // Create a new TNNetVolume based on the output size of the first layer of the neural network. + InputVolume := TNNetVolume.Create(NN.GetFirstLayer.Output); + + // Convert the input string into a one-hot encoded volume, which is the format + // expected by the neural network for processing. + InputVolume.OneHotEncodingReversed(InputString); + + // Run the forward pass of the neural network with the one-hot encoded input. + NN.Compute(InputVolume); + + // After the network has computed the output, retrieve the class with the highest + // probability from the last layer's output. + Result := NN.GetLastLayer().Output.GetClass(); + + // Release the memory allocated for the input volume to prevent memory leaks. 
+ InputVolume.Free; end; -{ TNNetHardSwish } +function GenerateStringFromTokens(NN: TNNet; Dict: TStringListInt; + InputString: string; oSampler: TNNetSamplerBase): string; +var + InputVolume, OutputVolume: TNNetVolume; + NextTokenInt: integer; + NextTokenStr: string; + Tokens: TNeuralIntegerArray; + TokenCnt: integer; +begin + InputVolume := TNNetVolume.Create(NN.GetFirstLayer.Output); + OutputVolume := TNNetVolume.Create(NN.GetLastLayer().Output); + Result := InputString; + Dict.StringToIntegerArray(InputString, Tokens); + TokenCnt := Length(Tokens); + repeat + InputVolume.CopyReversedNoChecksIntArr(Tokens); + NN.Compute(InputVolume, OutputVolume); + if Assigned(oSampler) + then NextTokenInt := oSampler.GetToken(OutputVolume) + else NextTokenInt := OutputVolume.GetClass(); + if NextTokenInt < Dict.Count then + begin + NextTokenStr := Dict.IntegerToWord(NextTokenInt); + Result := Result + ' ' + NextTokenStr; + end; + TokenCnt := TokenCnt + 1; + SetLength(Tokens, TokenCnt); + Tokens[TokenCnt - 1] := NextTokenInt; + until (NextTokenInt < 2) or (TokenCnt>=InputVolume.SizeX); + SetLength(Tokens, 0); + InputVolume.Free; + OutputVolume.Free; +end; -procedure TNNetHardSwish.Compute(); +{ TNNetMovingScale } + +constructor TNNetMovingScale.Create(pMaxTarget: TNeuralFloat; pChangeRate: TNeuralFloat); +begin + inherited Create; + InitDefault(); + FMaxTarget := pMaxTarget; + FChangeRate := pChangeRate; + FFloatSt[0] := pMaxTarget; + FFloatSt[1] := pChangeRate; +end; + +procedure TNNetMovingScale.Compute; var - SizeM1: integer; - LocalPrevOutput: TNNetVolume; - OutputCnt: integer; StartTime: double; - x: TNeuralFloat; + Multiplier: TNeuralFloat; begin StartTime := Now(); - LocalPrevOutput := FPrevLayer.Output; - SizeM1 := LocalPrevOutput.Size - 1; + inherited Compute; + Multiplier := FNeurons[0].FWeights.FData[0]; + if Multiplier <= 0.001 then + begin + Multiplier := 0.001; + end; + if (Multiplier<>1) then + begin + FOutput.Mul(Multiplier); + end; + FForwardTime := 
FForwardTime + (Now() - StartTime); +end; - if (FOutput.Size = FOutputError.Size) and (FOutputErrorDeriv.Size = FOutput.Size) then +constructor TNNetMovingScale.Create; +begin + inherited; + InitDefault(); + FMaxTarget := 1; + FChangeRate := 1; + FFloatSt[0] := FMaxTarget; + FFloatSt[1] := FChangeRate; +end; + +procedure TNNetMovingScale.Backpropagate; +var + StartTime: double; + MaxAbs: TNeuralFloat; + Multiplier, Diff: TNeuralFloat; +begin + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + StartTime := Now(); + Multiplier := FNeurons[0].FWeights.FData[0]; + if Multiplier <= 0.001 then begin - for OutputCnt := 0 to SizeM1 do - begin - x := LocalPrevOutput.FData[OutputCnt]; - if x > 3 then - begin - FOutput.FData[OutputCnt] := x; - FOutputErrorDeriv.FData[OutputCnt] := 1; - end - else if x < -3 then - begin - FOutput.FData[OutputCnt] := 0; - FOutputErrorDeriv.FData[OutputCnt] := 0; - end - else - begin - FOutput.FData[OutputCnt] := x*(x + 3)/6; - FOutputErrorDeriv.FData[OutputCnt] := 0.3333*x + 0.5; - end; - end; - end - else + Multiplier := 0.001; + end; + MaxAbs := FOutput.GetMaxAbs(); + if MaxAbs <> 0 then begin - // can't calculate error on input layers. 
- for OutputCnt := 0 to SizeM1 do + Diff := FMaxTarget-MaxAbs; + if (Diff < 0) or (Diff>0.75) then begin - x := LocalPrevOutput.FData[OutputCnt]; - if x > 3 then - begin - FOutput.FData[OutputCnt] := x; - end - else if x < -3 then - begin - FOutput.FData[OutputCnt] := 0; - end - else + FNeurons[0].FDelta.Add(0, 0, 0, (Diff)*FLearningRate*FChangeRate); + if (not FBatchUpdate) then begin - FOutput.FData[OutputCnt] := x*(x + 3)/6; + FNeurons[0].UpdateWeights(FInertia); + AfterWeightUpdate(); end; end; end; - FForwardTime := FForwardTime + (Now() - StartTime); + if (Multiplier > 0) and (Multiplier <> 1) then + begin + FOutputError.Mul(Multiplier); + //if Random(100)=0 then WriteLn(MaxAbs,' ->', Multiplier:10:8); + end; + FBackwardTime := FBackwardTime + (Now() - StartTime); + inherited Backpropagate(); end; -{ TNNetConvolutionHardSwish } +procedure TNNetMovingScale.InitDefault; +begin + if FNeurons.Count < 1 then AddMissingNeurons(1); + SetNumWeightsForAllNeurons(1, 1, 1); + FNeurons[0].FWeights.FData[0] := 1; +end; -constructor TNNetConvolutionHardSwish.Create(pNumFeatures, pFeatureSize, - pInputPadding, pStride: integer; pSuppressBias: integer); +{ TNNetDotProducts } + +procedure TNNetDotProducts.SetPrevLayer(pPrevLayer: TNNetLayer); begin - inherited Create(pNumFeatures, pFeatureSize, pInputPadding, pStride, pSuppressBias); - FActivationFn := @HardSwish; - FActivationFnDerivative := @HardSwishDerivative; + inherited SetPrevLayer(pPrevLayer); + FA := pPrevLayer.NN.Layers[ FStruct[0] ]; + FB := pPrevLayer.NN.Layers[ FStruct[1] ]; + if FA.Output.Depth <> FB.Output.Depth then + begin + FErrorProc( + 'TNNetDotProducts - Depths differ '+ + IntToStr(FA.Output.Depth) + ' ' + + IntToStr(FB.Output.Depth) + '.' 
+ ); + end; + FOutput.ReSize( + FB.Output.SizeX, + FB.Output.SizeY, + FA.Output.SizeX * FA.Output.SizeY + ); + FOutputError.Resize(FOutput); + FOutputErrorDeriv.Resize(FOutput); end; -{ TNNetConvolutionSwish } +constructor TNNetDotProducts.Create(A, B: TNNetLayer); +begin + Self.Create(A.LayerIdx, B.LayerIdx); +end; -constructor TNNetConvolutionSwish.Create(pNumFeatures, pFeatureSize, - pInputPadding, pStride: integer; pSuppressBias: integer); +constructor TNNetDotProducts.Create(AIdx, BIdx: integer); begin - inherited Create(pNumFeatures, pFeatureSize, pInputPadding, pStride, pSuppressBias); - FActivationFn := @Swish; - FActivationFnDerivative := @SwishDerivative; + inherited Create; + FAT := TNNetVolume.Create(); + FBT := TNNetVolume.Create(); + FET := TNNetVolume.Create(); + FAError := TNNetVolume.Create(); + FBError := TNNetVolume.Create(); + + FStruct[0] := AIdx; + FStruct[1] := BIdx; end; -{ TNNetScaleLearning } +destructor TNNetDotProducts.Destroy; +begin + FAError.Free; + FBError.Free; + FET.Free; + FBT.Free; + FAT.Free; + inherited Destroy; +end; -procedure TNNetScaleLearning.Compute(); +procedure TNNetDotProducts.Compute; +var + StartTime: double; begin - FOutput.CopyNoChecks(FPrevLayer.FOutput); + StartTime := Now(); + FOutput.DotProductsPointwise(FA.Output, FB.Output); + FForwardTime := FForwardTime + (Now() - StartTime); end; -procedure TNNetScaleLearning.Backpropagate(); +procedure TNNetDotProducts.Backpropagate; var StartTime: double; - MagnitudeDelta: TNeuralFloat; - Magnitude: TNeuralFloat; begin + StartTime := Now(); Inc(FBackPropCallCurrentCnt); if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; - StartTime := Now(); - if FNeurons[0].Weights.FData[1] > 1 then - begin - FOutputError.Mul(FNeurons[0].Weights.FData[1]); - end; - Magnitude := FOutput.GetMagnitude(); - MagnitudeDelta := (1-Magnitude); - if (MagnitudeDelta>0) or (FNeurons[0].Weights.FData[1] > 0) then - begin - FNeurons[0].FDelta.Add(0,0,1, NeuronForceRange(MagnitudeDelta, 
FLearningRate*10) ); + + FAT.CopyTransposingAs2D(FA.Output); + FBT.CopyTransposingAs2D(FB.Output); + FET.CopyTransposingAs2D(FOutputError); + + FAError.DotProductsPointwise(FBT, FET); + FBError.DotProductsPointwise(FAT, FOutputError); + + FA.OutputError.Add(FAError); + FB.OutputError.Add(FBError); + + FBackwardTime := FBackwardTime + (Now() - StartTime); + FB.Backpropagate(); + FA.Backpropagate(); +end; + +{ TNNetPointwiseSoftMax } +constructor TNNetPointwiseSoftMax.Create(SkipBackpropDerivative: integer); +begin + inherited Create(); + FSkipBackpropDerivative := (SkipBackpropDerivative > 0); + FStruct[0] := SkipBackpropDerivative; +end; + +procedure TNNetPointwiseSoftMax.Compute; +var + StartTime: double; +begin + StartTime := Now(); + inherited Compute; + FOutput.PointwiseSoftMax(); + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +constructor TNNetPointwiseSoftMax.Create; +begin + inherited; + // default = 0 + FSkipBackpropDerivative := False; + FStruct[0] := 0; +end; + +procedure TNNetPointwiseSoftMax.Backpropagate; +var + StartTime: double; + {$IFDEF Debug} + Min, Max: TNeuralFloat; + {$ENDIF} +begin + StartTime := Now(); + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + if Assigned(FPrevLayer) and + (FPrevLayer.OutputError.Size > 0) and + (FPrevLayer.OutputError.Size = FPrevLayer.Output.Size) then + begin + if FSkipBackpropDerivative then + begin + FPrevLayer.OutputError.Add(FOutputError); + end + else + begin + // derivative is: x*(1-x) + // https://eli.thegreenplace.net/2016/the-softmax-function-and-its-derivative/ + // https://github.com/neuroph/neuroph/blob/master/neuroph-2.9/Contrib/src/main/java/org/neuroph/contrib/learning/SoftMax.java + FOutputErrorDeriv.Fill(1); + FOutputErrorDeriv.Sub(FOutput); + FOutputErrorDeriv.Mul(FOutput); + FPrevLayer.OutputError.MulAdd(FOutputError, FOutputErrorDeriv); + end; end; - if (not FBatchUpdate) then + {$IFDEF Debug} + Min := FOutputErrorDeriv.GetMin(); + 
Max := FOutputErrorDeriv.GetMax(); + if Min < 0 then FErrorProc('Softmax derivative is negative: '+FloatToStrF(Min,ffFixed,6,4)); + if Max > 0.25 then FErrorProc('Softmax derivative is bigger than 0.25: '+FloatToStrF(Max,ffFixed,6,4)); + {$ENDIF} + FBackwardTime := FBackwardTime + (Now() - StartTime); + FPrevLayer.Backpropagate(); +end; + +{ TNNetAddPositionalEmbedding } +procedure TNNetAddPositionalEmbedding.SetPrevLayer(pPrevLayer: TNNetLayer); +begin + inherited SetPrevLayer(pPrevLayer); + FPositionalEmbedding.ReSize(FOutput); + FPositionalEmbedding.PositionalEncoding(FStruct[0]); +end; + +constructor TNNetAddPositionalEmbedding.Create(n: integer); +begin + inherited Create; + FPositionalEmbedding := TNNetVolume.Create; + if n=0 + then FStruct[0] := 10000 + else FStruct[0] := n; +end; + +constructor TNNetAddPositionalEmbedding.Create; +begin + inherited Create; + FPositionalEmbedding := TNNetVolume.Create; + FStruct[0] := 10000 +end; + +destructor TNNetAddPositionalEmbedding.Destroy; +begin + FPositionalEmbedding.Free; + inherited Destroy; +end; + +procedure TNNetAddPositionalEmbedding.Compute; +begin + inherited Compute; + FOutput.Add(FPositionalEmbedding); +end; + +{ TNNetTransposeYD } + +procedure TNNetTransposeYD.SetPrevLayer(pPrevLayer: TNNetLayer); +begin + inherited SetPrevLayer(pPrevLayer); + FOutput.ReSize(pPrevLayer.Output.SizeX, pPrevLayer.Output.Depth, pPrevLayer.Output.SizeY); + FOutputError.ReSize(pPrevLayer.OutputError.SizeX, pPrevLayer.OutputError.Depth, pPrevLayer.OutputError.SizeY); + FOutputErrorDeriv.ReSize(pPrevLayer.OutputErrorDeriv.SizeX, pPrevLayer.OutputErrorDeriv.Depth, pPrevLayer.OutputErrorDeriv.SizeY); +end; + +procedure TNNetTransposeYD.Compute; +var + StartTime: double; +begin + StartTime := Now(); + FOutput.CopyTransposingYD(FPrevLayer.FOutput); + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +procedure TNNetTransposeYD.Backpropagate; +var + StartTime: double; +begin + Inc(FBackPropCallCurrentCnt); + if 
FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + if Assigned(FPrevLayer) and + (FPrevLayer.OutputError.Size > 0) and + (FPrevLayer.OutputError.Size = FPrevLayer.Output.Size) then begin - FNeurons[0].UpdateWeights(FInertia); - AfterWeightUpdate(); + StartTime := Now(); + FPrevLayer.FOutputError.AddTransposingYD(FOutputError); + FBackwardTime := FBackwardTime + (Now() - StartTime); + end; + FPrevLayer.Backpropagate(); +end; + +{ TNNetTransposeXD } + +procedure TNNetTransposeXD.SetPrevLayer(pPrevLayer: TNNetLayer); +begin + inherited SetPrevLayer(pPrevLayer); + FOutput.ReSize(pPrevLayer.Output.Depth, pPrevLayer.Output.SizeY, pPrevLayer.Output.SizeX); + FOutputError.ReSize(pPrevLayer.OutputError.Depth, pPrevLayer.OutputError.SizeY, pPrevLayer.OutputError.SizeX); + FOutputErrorDeriv.ReSize(pPrevLayer.OutputErrorDeriv.Depth, pPrevLayer.OutputErrorDeriv.SizeY, pPrevLayer.OutputErrorDeriv.SizeX); +end; + +procedure TNNetTransposeXD.Compute; +var + StartTime: double; +begin + StartTime := Now(); + FOutput.CopyTransposingXD(FPrevLayer.FOutput); + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +procedure TNNetTransposeXD.Backpropagate; +begin + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + if Assigned(FPrevLayer) and + (FPrevLayer.OutputError.Size > 0) and + (FPrevLayer.OutputError.Size = FPrevLayer.Output.Size) then + begin + FPrevLayer.FOutputError.AddTransposingXD(FOutputError); + end; + FPrevLayer.Backpropagate(); +end; + +{ TNNetReLUP } + +procedure TNNetReLUP.Compute; +var + SizeM1: integer; + LocalPrevOutput: TNNetVolume; + OutputCnt: integer; + StartTime: double; +begin + StartTime := Now(); + LocalPrevOutput := FPrevLayer.Output; + SizeM1 := LocalPrevOutput.Size - 1; + + if (FOutput.Size = FOutputError.Size) and (FOutputErrorDeriv.Size = FOutput.Size) then + begin + for OutputCnt := 0 to SizeM1 do + begin + if LocalPrevOutput.FData[OutputCnt] > 0 then // Positive Values Only + begin + 
FOutput.FData[OutputCnt] := LocalPrevOutput.FData[OutputCnt]; + FOutputErrorDeriv.FData[OutputCnt] := 1; + end + else + begin + FOutput.FData[OutputCnt] := 0; + FOutputErrorDeriv.FData[OutputCnt] := 0; + end; + end; + end + else + begin + // can't calculate error on input layers. + for OutputCnt := 0 to SizeM1 do + begin + if LocalPrevOutput.FData[OutputCnt]>0 then + begin + FOutput.FData[OutputCnt] := LocalPrevOutput.FData[OutputCnt]; + end + else + begin + FOutput.FData[OutputCnt] := 0; + end; + end; + end; + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +{ TNNetMaxPoolWithPosition } + +procedure TNNetMaxPoolWithPosition.ComputeDefaultStride; +begin + inherited ComputeDefaultStride(); + ComputePositions(); +end; + +procedure TNNetMaxPoolWithPosition.ComputeWithStride; +begin + inherited ComputeWithStride(); + ComputePositions(); +end; + +procedure TNNetMaxPoolWithPosition.ComputePositions; +var + CntOutputX, CntOutputY, CntD: integer; + OutputMaxX, OutputMaxY, MaxD: integer; + OutputRawPos, PosX, PosY: integer; + PrevDepth: integer; + PositionBlockCnt: integer; +begin + OutputMaxX := Output.SizeX - 1; + OutputMaxY := Output.SizeY - 1; + PrevDepth := FPrevLayer.Output.Depth; + MaxD := PrevDepth - 1; + for CntOutputY := 0 to OutputMaxY do + begin + for CntOutputX := 0 to OutputMaxX do + begin + OutputRawPos := Output.GetRawPos(CntOutputX, CntOutputY); + for CntD := 0 to MaxD do + begin + PosX := FMaxPosX[OutputRawPos]; // Position X + PosY := FMaxPosY[OutputRawPos]; // Position Y + PositionBlockCnt := 0; + if FLogPosX then + begin + Inc(PositionBlockCnt); + FOutput.FData[OutputRawPos + PrevDepth*PositionBlockCnt] := FPosX[PosX]; + end; + if FLogPosY then + begin + Inc(PositionBlockCnt); + FOutput.FData[OutputRawPos + PrevDepth*PositionBlockCnt] := FPosY[PosY]; + end; + Inc(OutputRawPos); + end; + end; + end; +end; + +constructor TNNetMaxPoolWithPosition.Create(pPoolSize: integer; + pStride: integer; pPadding: integer; + pLogPosX: integer; pLogPosY: 
integer; + pMaxBackpropX: integer = 0); +begin + inherited Create(pPoolSize, pStride, pPadding); + FStruct[3] := pLogPosX; + FStruct[4] := pLogPosY; + FStruct[5] := pMaxBackpropX; + FLogPosX := (pLogPosX>0); + FLogPosY := (pLogPosY>0); + FExtraSize := 0; + FMaxBackpropX := pMaxBackpropX; + if FLogPosX then Inc(FExtraSize); + if FLogPosY then Inc(FExtraSize); +end; + +destructor TNNetMaxPoolWithPosition.Destroy; +begin + SetLength(FPosX, 0); + SetLength(FPosY, 0); + inherited Destroy; +end; + +procedure TNNetMaxPoolWithPosition.Backpropagate; +var + CntOutputX, CntOutputY, CntD: integer; + OutputMaxX, OutputMaxY, MaxD: integer; + OutputRawPos: integer; + PrevDepth: integer; +begin + if FMaxBackpropX < Output.SizeX then + begin + OutputMaxX := Output.SizeX - 1; + OutputMaxY := Output.SizeY - 1; + PrevDepth := FPrevLayer.Output.Depth; + MaxD := PrevDepth - 1; + for CntOutputY := 0 to OutputMaxY do + begin + for CntOutputX := FMaxBackpropX to OutputMaxX do + begin + OutputRawPos := Output.GetRawPos(CntOutputX, CntOutputY); + for CntD := 0 to MaxD do + begin + FOutput.FData[OutputRawPos] := 0; + Inc(OutputRawPos); + end; + end; + end; + end; + inherited Backpropagate; +end; + +procedure TNNetMaxPoolWithPosition.SetPrevLayer(pPrevLayer: TNNetLayer); +var + CntSizeX, CntSizeY: integer; +begin + inherited SetPrevLayer(pPrevLayer); + if FExtraSize > 0 then + begin + FOutput.ReSize(FOutputSizeX, FOutputSizeY, FOutputSizeD * (1+FExtraSize)); + FOutputError.ReSize(FOutputSizeX, FOutputSizeY, FOutputSizeD * (1+FExtraSize)); + FOutputErrorDeriv.ReSize(FOutputSizeX, FOutputSizeY, FOutputSizeD * (1+FExtraSize)); + SetLength(FMaxPosX, FOutput.Size); + SetLength(FMaxPosY, FOutput.Size); + SetLength(FPosX, FPrevLayer.Output.SizeX); + SetLength(FPosY, FPrevLayer.Output.SizeY); + for CntSizeX := 0 to FPrevLayer.Output.SizeX - 1 do + begin + FPosX[CntSizeX] := CntSizeX/FPrevLayer.Output.SizeX; + end; + for CntSizeY := 0 to FPrevLayer.Output.SizeY - 1 do + begin + FPosY[CntSizeY] := 
CntSizeY/FPrevLayer.Output.SizeY; + end; + if FMaxBackpropX = 0 then + begin + FMaxBackpropX := FOutputSizeX; + end; + FMaxBackpropX := Min(FOutputSizeX, FMaxBackpropX); + FStruct[5] := FMaxBackpropX; + end; +end; + +{ TNNetCrop } + +procedure TNNetCrop.SetPrevLayer(pPrevLayer: TNNetLayer); +begin + inherited SetPrevLayer(pPrevLayer); + FOutput.ReSize(FLenX, FLenY, pPrevLayer.FOutput.Depth); + if (pPrevLayer.FOutputError.Size = pPrevLayer.FOutput.Size) then + begin + FOutputError.ReSize(FOutput); + FOutputErrorDeriv.ReSize(FOutput); + end; +end; + +constructor TNNetCrop.Create(StartX, StartY, LenX, LenY: integer); +begin + inherited Create(); + FStartX := StartX; + FStartY := StartY; + FLenX := Max(LenX, 1); + FLenY := Max(LenY, 1); + FStruct[0] := StartX; + FStruct[1] := StartY; + FStruct[2] := FLenX; + FStruct[3] := FLenY; +end; + +procedure TNNetCrop.Compute; +var + StartTime: double; +begin + StartTime := Now(); + if + (FPrevLayer.FOutputError.Size = FPrevLayer.FOutput.Size) and + (FOutput.Size <> FOutputError.Size) + then + begin + FOutputError.ReSize(FOutput); + FOutputErrorDeriv.ReSize(FOutput); + end; + FOutput.CopyCropping(FPrevLayer.FOutput, FStartX, FStartY, FLenX, FLenY); + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +constructor TNNetCrop.Create; +begin + raise Exception.Create('Default constructor not allowed here'); + inherited; +end; + +procedure TNNetCrop.Backpropagate; +var + StartTime: double; +begin + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + if (FPrevLayer.Output.Size > 0) and (FPrevLayer.Output.Size = FPrevLayer.OutputError.Size) then + begin + StartTime := Now(); + FPrevLayer.FOutputError.AddArea + ( + {DestX=}FStartX, + {DestY=}FStartY, + {OriginX=}0, + {OriginY=}0, + {LenX=}FLenX, + {LenY=}FLenY, + FOutputError + ); + FBackwardTime := FBackwardTime + (Now() - StartTime); + end; + if Assigned(FPrevLayer) then FPrevLayer.Backpropagate(); +end; + +{ TNNetPadXY } + +procedure 
TNNetPadXY.SetPrevLayer(pPrevLayer: TNNetLayer); +begin + inherited SetPrevLayer(pPrevLayer); + FOutput.ReSize(pPrevLayer.FOutput.SizeX + FPaddingX*2, pPrevLayer.FOutput.SizeY + FPaddingY*2, pPrevLayer.FOutput.Depth); + if (pPrevLayer.FOutputError.Size = pPrevLayer.FOutput.Size) then + begin + FOutputError.ReSize(FOutput); + FOutputErrorDeriv.ReSize(FOutput); + end; +end; + +constructor TNNetPadXY.Create(PaddingX, PaddingY: integer); +begin + inherited Create(); + FStruct[0] := PaddingX; + FStruct[1] := PaddingY; + FPaddingX := PaddingX; + FPaddingY := PaddingY; +end; + +procedure TNNetPadXY.Compute; +var + StartTime: double; +begin + StartTime := Now(); + if + (FPrevLayer.FOutputError.Size = FPrevLayer.FOutput.Size) and + (FOutput.Size <> FOutputError.Size) + then + begin + FOutputError.ReSize(FOutput); + FOutputErrorDeriv.ReSize(FOutput); + end; + FOutput.CopyPadding(FPrevLayer.FOutput, FPaddingX, FPaddingY); + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +constructor TNNetPadXY.Create; +begin + raise Exception.Create('Default constructor not allowed here'); + inherited; +end; + +procedure TNNetPadXY.Backpropagate; +var + StartTime: double; +begin + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + if (FPrevLayer.Output.Size > 0) and (FPrevLayer.Output.Size = FPrevLayer.OutputError.Size) then + begin + StartTime := Now(); + FPrevLayer.FOutputError.AddArea + ( + {DestX=}0, + {DestY=}0, + {OriginX=}FPaddingX, + {OriginY=}FPaddingY, + {LenX=}FPrevLayer.OutputError.SizeX, + {LenY=}FPrevLayer.OutputError.SizeY, + FOutputError + ); + FBackwardTime := FBackwardTime + (Now() - StartTime); + end; + if Assigned(FPrevLayer) then FPrevLayer.Backpropagate(); +end; + +{ TNNetGroupedPointwiseConvHardSwish } +constructor TNNetGroupedPointwiseConvHardSwish.Create(pNumFeatures, + pGroups: integer; pSuppressBias: integer); +begin + inherited Create(pNumFeatures, pGroups, pSuppressBias); + FActivationFn := @HardSwish; + 
FActivationFnDerivative := @HardSwishDerivative; +end; + +{ TNNetHardSwish } + +procedure TNNetHardSwish.Compute(); +var + SizeM1: integer; + LocalPrevOutput: TNNetVolume; + OutputCnt: integer; + StartTime: double; + x: TNeuralFloat; +begin + StartTime := Now(); + LocalPrevOutput := FPrevLayer.Output; + SizeM1 := LocalPrevOutput.Size - 1; + + if (FOutput.Size = FOutputError.Size) and (FOutputErrorDeriv.Size = FOutput.Size) then + begin + for OutputCnt := 0 to SizeM1 do + begin + x := LocalPrevOutput.FData[OutputCnt]; + if x > 3 then + begin + FOutput.FData[OutputCnt] := x; + FOutputErrorDeriv.FData[OutputCnt] := 1; + end + else if x < -3 then + begin + FOutput.FData[OutputCnt] := 0; + FOutputErrorDeriv.FData[OutputCnt] := 0; + end + else + begin + FOutput.FData[OutputCnt] := x*(x + 3)/6; + FOutputErrorDeriv.FData[OutputCnt] := 0.3333*x + 0.5; + end; + end; + end + else + begin + // can't calculate error on input layers. + for OutputCnt := 0 to SizeM1 do + begin + x := LocalPrevOutput.FData[OutputCnt]; + if x > 3 then + begin + FOutput.FData[OutputCnt] := x; + end + else if x < -3 then + begin + FOutput.FData[OutputCnt] := 0; + end + else + begin + FOutput.FData[OutputCnt] := x*(x + 3)/6; + end; + end; + end; + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +{ TNNetConvolutionHardSwish } + +constructor TNNetConvolutionHardSwish.Create(pNumFeatures, pFeatureSize, + pInputPadding, pStride: integer; pSuppressBias: integer); +begin + inherited Create(pNumFeatures, pFeatureSize, pInputPadding, pStride, pSuppressBias); + FActivationFn := @HardSwish; + FActivationFnDerivative := @HardSwishDerivative; +end; + +{ TNNetConvolutionSwish } + +constructor TNNetConvolutionSwish.Create(pNumFeatures, pFeatureSize, + pInputPadding, pStride: integer; pSuppressBias: integer); +begin + inherited Create(pNumFeatures, pFeatureSize, pInputPadding, pStride, pSuppressBias); + FActivationFn := @Swish; + FActivationFnDerivative := @SwishDerivative; +end; + +{ TNNetScaleLearning } 
+ +procedure TNNetScaleLearning.Compute(); +begin + inherited Compute(); +end; + +procedure TNNetScaleLearning.Backpropagate(); +var + StartTime: double; + Magnitude: TNeuralFloat; +begin + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + StartTime := Now(); + Magnitude := FOutputError.GetMagnitude(); + if (Magnitude > 0) and (Magnitude < 1) then + begin + FOutputError.Mul(1/Magnitude); end; - //if Random(100)=0 then WriteLn(MagnitudeDelta:6:4,' - ',FNeurons[0].Weights.FData[1]:6:4); + //if Random(100)=0 then WriteLn(Magnitude:6:4,' - ',FNeurons[0].Weights.FData[1]:6:4); FPrevLayer.FOutputError.Add(FOutputError); FPrevLayer.Backpropagate(); FBackwardTime := FBackwardTime + (Now() - StartTime); @@ -2169,25 +3105,34 @@ constructor TNNetDebug.Create(hasForward, hasBackward: integer); { TNNetDebug } procedure TNNetDebug.Compute(); +var + StartTime: double; begin inherited Compute(); if ((FStruct[0]>0) and (Random(1000)=0)) then begin + StartTime := Now(); Write('Forward:'); FOutput.PrintDebug(); WriteLn; + FForwardTime := FForwardTime + (Now() - StartTime); end; end; procedure TNNetDebug.Backpropagate(); +var + StartTime: double; begin - Inc(FBackPropCallCurrentCnt); - if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + // FBackPropCallCurrentCnt check is inherited done. 
+ //Inc(FBackPropCallCurrentCnt); + //if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; if ((FStruct[1]>0) and (Random(1000)=0)) then begin + StartTime := Now(); Write('Backward:'); FOutputError.PrintDebug(); WriteLn; + FBackwardTime := FBackwardTime + (Now() - StartTime); end; inherited Backpropagate(); end; @@ -2422,7 +3367,9 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); var OutputX, OutputY, OutputD: integer; MaxX, MaxY, MaxD: integer; - (*GroupId, *)GroupDSize, GroupDStart: integer; + OutputGroupDSize: integer; + PrevLayerGroupDSize, PrevLayerGroupDStart: integer; + OutputGroupId: integer; PrevX, PrevY: integer; OutputRawPos: integer; CanBackpropOnPos: boolean; @@ -2453,7 +3400,8 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); MaxD := OutputError.Depth - 1; LocalDestPtr := nil; // Debug code: FOutputError.ForceMaxAbs(1); - GroupDSize := OutputError.Depth div FStruct[5]; + OutputGroupDSize := OutputError.Depth div FStruct[5]; + PrevLayerGroupDSize := FPrevLayer.OutputError.Depth div FStruct[5]; if FPadding > 0 then begin FPrevLayerErrorPadded.Fill(0); @@ -2493,10 +3441,12 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); OutputRawPos := FOutputErrorDeriv.GetRawPos(OutputX, OutputY, StartTileD); for OutputD := StartTileD to EndTileD do begin - //GroupId := FArrGroupId[OutputD]; - GroupDStart := FArrGroupIdStart[OutputD]; + // What is the current group id for OutputD? + OutputGroupId := FOutputGroupId[OutputD]; + // What is the starting point (depth) in the previous layer for this group id? 
+ PrevLayerGroupDStart := FGroupIdToPrevLayerIdStart[OutputGroupId]; if (FCalculatePrevLayerError and CanBackpropOnPos) - then LocalDestPtr := LocalPrevError.GetRawPtr(PrevX, PrevY, GroupDStart); + then LocalDestPtr := LocalPrevError.GetRawPtr(PrevX, PrevY, PrevLayerGroupDStart); {$IFDEF FPC} if FActivationFn = @RectifiedLinearUnit then begin @@ -2529,7 +3479,7 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); LocalLearningErrorDeriv := (-FLearningRate) * LocalOutputErrorDeriv; if (LocalLearningErrorDeriv <> 0.0) then begin - PtrPreparedInput := FInputPrepared.GetRawPtr(OutputX, OutputY, GroupDStart); + PtrPreparedInput := FInputPrepared.GetRawPtr(OutputX, OutputY, PrevLayerGroupDStart); {$IFNDEF AVX64} FArrNeurons[OutputD].Delta.MulAdd(LocalLearningErrorDeriv, PtrPreparedInput); {$ELSE} @@ -2549,7 +3499,7 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); LocalWeight := FArrNeurons[OutputD].Weights; if FPointwise then begin - LocalPrevError.MulAdd(LocalDestPtr, LocalWeight.DataPtr, LocalOutputErrorDeriv, GroupDSize); + LocalPrevError.MulAdd(LocalDestPtr, LocalWeight.DataPtr, LocalOutputErrorDeriv, PrevLayerGroupDSize); end else begin @@ -2561,10 +3511,10 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); begin LocalPrevError.MulAdd ( - LocalPrevError.GetRawPtr(PrevX + LocalCntX, PrevY + LocalCntY, GroupDStart), //PrevPtrA + LocalPrevError.GetRawPtr(PrevX + LocalCntX, PrevY + LocalCntY, PrevLayerGroupDStart), //PrevPtrA LocalWeight.GetRawPtr(LocalCntX, LocalCntY), //PrevPtrB SmoothLocalOutputErrorDeriv, - GroupDSize + OutputGroupDSize ); end; end; @@ -2588,7 +3538,8 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); procedure TNNetGroupedConvolutionLinear.SetPrevLayer(pPrevLayer: TNNetLayer); var - GroupDSize: integer; + PrevLayerGroupDSize: integer; + OutputGroupDSize: integer; OutputD: integer; GroupId, GroupDStart: integer; LocalPrevError: TNNetVolume; @@ -2596,18 +3547,34 @@ procedure 
TNNetGroupedConvolutionLinear.SetPrevLayer(pPrevLayer: TNNetLayer); inherited SetPrevLayer(pPrevLayer); FVectorSize := FFeatureSizeX*FFeatureSizeY*(pPrevLayer.Output.Depth div FStruct[5]); FVectorSizeBytes := FVectorSize * SizeOf(TNeuralFloat); - GroupDSize := pPrevLayer.Output.Depth div FStruct[5]; - SetNumWeightsForAllNeurons(FFeatureSizeX, FFeatureSizeY, GroupDSize); + PrevLayerGroupDSize := pPrevLayer.Output.Depth div FStruct[5]; + OutputGroupDSize := FOutput.Depth div FStruct[5]; + SetNumWeightsForAllNeurons(FFeatureSizeX, FFeatureSizeY, PrevLayerGroupDSize); InitDefault(); AfterWeightUpdate(); - SetLength(FArrGroupId, pPrevLayer.Output.Depth); - SetLength(FArrGroupIdStart, pPrevLayer.Output.Depth); + SetLength(FArrPrevLayerGroupId, pPrevLayer.Output.Depth); + SetLength(FArrPrevLayerGroupIdStart, pPrevLayer.Output.Depth); + SetLength(FOutputGroupId, FOutput.Depth); + SetLength(FOutputGroupIdStart, FOutput.Depth); + SetLength(FGroupIdToPrevLayerIdStart, FStruct[5]); + SetLength(FGroupIdToOutputIdStart, FStruct[5]); + for OutputD := 0 to pPrevLayer.Output.Depth - 1 do begin - GroupId := OutputD div GroupDSize; - GroupDStart := GroupId * GroupDSize; - FArrGroupId[OutputD] := GroupId; - FArrGroupIdStart[OutputD] := GroupDStart; + GroupId := OutputD div PrevLayerGroupDSize; + GroupDStart := GroupId * PrevLayerGroupDSize; + FGroupIdToPrevLayerIdStart[GroupId] := GroupDStart; + FArrPrevLayerGroupId[OutputD] := GroupId; + FArrPrevLayerGroupIdStart[OutputD] := GroupDStart; + end; + + for OutputD := 0 to FOutput.Depth - 1 do + begin + GroupId := OutputD div OutputGroupDSize; + GroupDStart := GroupId * OutputGroupDSize; + FGroupIdToOutputIdStart[GroupId] := GroupDStart; + FOutputGroupId[OutputD] := GroupId; + FOutputGroupIdStart[OutputD] := GroupDStart; end; if FPadding > 0 then @@ -2635,8 +3602,12 @@ constructor TNNetGroupedConvolutionLinear.Create(pNumFeatures, pFeatureSize, destructor TNNetGroupedConvolutionLinear.Destroy(); begin - SetLength(FArrGroupId, 0); - 
SetLength(FArrGroupIdStart, 0); + SetLength(FArrPrevLayerGroupId, 0); + SetLength(FArrPrevLayerGroupIdStart, 0); + SetLength(FOutputGroupId, 0); + SetLength(FOutputGroupIdStart, 0); + SetLength(FGroupIdToPrevLayerIdStart, 0); + SetLength(FGroupIdToOutputIdStart, 0); inherited Destroy(); end; @@ -2731,7 +3702,7 @@ constructor TNNetNegate.Create(); procedure TNNetMulByConstant.Compute(); begin inherited Compute(); - FOutput.Mul(FStruct[0]); + FOutput.Mul(FFloatSt[0]); end; procedure TNNetCellMulByCell.SetPrevLayer(pPrevLayer: TNNetLayer); @@ -3050,6 +4021,12 @@ procedure TNNetPad.Compute(); FForwardTime := FForwardTime + (Now() - StartTime); end; +constructor TNNetPad.Create; +begin + raise Exception.Create('Default constructor not allowed here'); + inherited; +end; + procedure TNNetPad.Backpropagate(); var StartTime: double; @@ -5228,9 +6205,9 @@ procedure TNNetLayerConcatedWeights.EnableOpenCL( RefreshNeuronWeightList(); AfterWeightUpdate(); - FConcatedWeights.ReSize(FNeuronWeightList.GetTotalSize(),1,1); + FConcatedWeights.ReSize(FNeuronWeightList.Count, 1, FNeuronWeightList[0].Size); - FConcatedWInter.ReSize(FNeuronWeightList.GetTotalSize(),1,1); + FConcatedWInter.ReSize(FNeuronWeightList[0].Size, 1, FNeuronWeightList.Count); //WriteLn(' Layer:', Self.LayerIdx,' Vector:',FVectorSize,' Neuron count:',FNeuronWeightList.Count,' Output size:',FOutput.Size); FShouldInterleaveWeights := true; @@ -5304,10 +6281,14 @@ procedure TNNetReLUBase.Backpropagate(); end; { TNNetMulLearning } -constructor TNNetMulLearning.Create(pMul: integer); +constructor TNNetMulLearning.Create(pMul: TNeuralFloat); begin inherited Create(); - FStruct[0] := pMul; + FFloatSt[0] := pMul; + if pMul = 0 then + begin + FErrorProc('TNNetMulLearning or TNNetMulByConstant can not be zero.'); + end; end; procedure TNNetMulLearning.Backpropagate(); @@ -5317,7 +6298,7 @@ procedure TNNetMulLearning.Backpropagate(); StartTime := Now(); Inc(FBackPropCallCurrentCnt); if FBackPropCallCurrentCnt < 
FDepartingBranchesCnt then exit; - FOutputError.Mul(FStruct[0]); + FOutputError.Mul(FFloatSt[0]); FBackwardTime := FBackwardTime + (Now() - StartTime); inherited Backpropagate(); end; @@ -5329,9 +6310,6 @@ constructor TNNetSum.Create(aL: array of TNNetLayer); SizeX, SizeY, Deep: integer; begin inherited Create(); - SizeX := aL[0].FOutput.SizeX; - SizeY := aL[0].FOutput.SizeY; - Deep := aL[0].FOutput.Depth; if Length(aL) < 1 then begin @@ -5339,6 +6317,10 @@ constructor TNNetSum.Create(aL: array of TNNetLayer); end else begin + SizeX := aL[0].FOutput.SizeX; + SizeY := aL[0].FOutput.SizeY; + Deep := aL[0].FOutput.Depth; + for LayerCnt := Low(aL) to High(aL) do begin if @@ -5929,6 +6911,175 @@ function THistoricalNets.AddSuperResolution(pSizeX, pSizeY, BottleNeck, pNeurons Result := AddLayer( TNNetConvolutionLinear.Create(3,1,0,0) ); end; +// Ported code from: +// https://github.com/tgautam03/Transformers/blob/master/classification.ipynb +procedure TNNet.AddSingleHeadSelfAttention( + out Attended, W: TNNetLayer); +var + x, Query, Key, ValueT: TNNetLayer; // WT, YT, Value + EmbeddingDim: integer; +begin + x := GetLastLayer(); + EmbeddingDim := x.Output.Depth; + Query := AddLayerAfter( TNNetPointwiseConvLinear.Create(EmbeddingDim), x); + Key := AddLayerAfter( TNNetPointwiseConvLinear.Create(EmbeddingDim), x); + (*Value:=*)AddLayerAfter( TNNetPointwiseConvLinear.Create(EmbeddingDim), x); + ValueT := AddLayer( TNNetTransposeXD.Create() ); + (*WT := *)AddLayer( TNNetDotProducts.Create(Query, Key) ); + (*WT := *)AddLayer( TNNetMulByConstant.Create(1/Sqrt(EmbeddingDim)) ); + (*WT := *)AddLayer( TNNetReLUL.Create(-500,+500,0) ); + (*W := *) AddLayer( TNNetTransposeXD.Create() ); + W := AddLayer( TNNetPointwiseSoftMax.Create() ); + (*YT := *)AddLayer( TNNetDotProducts.Create(ValueT, W) ); + Attended := AddLayer( TNNetPointwiseConvLinear.Create(EmbeddingDim) ); +end; + +function TNNet.AddSelfAttention(Heads: integer): TNNetLayer; +var + W : TNNetLayer; + // Query, Key, ValueT: 
TNNetLayer; // WT, YT, Value + PreviousLayer: TNNetLayer; + InputChannelsPerGroup: integer; + EachGroupOutput: array of TNNetLayer; + HeadCnt: integer; + QueryGroup, KeyGroup, {ValueGroup, }ValueTGroup: TNNetLayer; +begin + if Heads <= 1 then + begin + AddSingleHeadSelfAttention(Result, W); + end + else + begin + PreviousLayer := GetLastLayer(); + SetLength(EachGroupOutput, Heads); + InputChannelsPerGroup := PreviousLayer.FOutput.Depth div Heads; + for HeadCnt := 0 to Heads - 1 do + begin + QueryGroup := AddLayerAfter( TNNetPointwiseConvLinear.Create(InputChannelsPerGroup), PreviousLayer); + KeyGroup := AddLayerAfter( TNNetPointwiseConvLinear.Create(InputChannelsPerGroup), PreviousLayer); + {ValueGroup := }AddLayerAfter( TNNetPointwiseConvLinear.Create(InputChannelsPerGroup), PreviousLayer); + ValueTGroup := AddLayer( TNNetTransposeXD.Create() ); + (*W := *)AddLayer( TNNetDotProducts.Create(QueryGroup, KeyGroup) ); + (*W := *)AddLayer( TNNetReLUL.Create(-100,+100,0) ); + (*W := *)AddLayer( TNNetMulByConstant.Create(1/Sqrt(InputChannelsPerGroup)) ); + W := AddLayer( TNNetPointwiseSoftMax.Create() ); + (*YT := *)AddLayer( TNNetDotProducts.Create(ValueTGroup, W) ); + EachGroupOutput[HeadCnt] := GetLastLayer(); + end; + AddLayer( TNNetDeepConcat.Create(EachGroupOutput) ); + SetLength(EachGroupOutput, 0); + Result := AddLayer( TNNetPointwiseConvLinear.Create(PreviousLayer.FOutput.Depth) ); + end; +end; + +function TNNet.AddSelfAttentionCAI(Heads: integer): TNNetLayer; +var + W: TNNetLayer; + PreviousLayer: TNNetLayer; + InputChannelsPerGroup: integer; + EachGroupOutput: array of TNNetLayer; + HeadCnt: integer; + QueryGroup, KeyGroup, ValueGroup, ValueTGroup: TNNetLayer; +begin + if Heads <= 1 then + begin + AddSingleHeadSelfAttention(Result, W); + end + else + begin + PreviousLayer := GetLastLayer(); + SetLength(EachGroupOutput, Heads); + InputChannelsPerGroup := PreviousLayer.FOutput.Depth div Heads; + for HeadCnt := 0 to Heads - 1 do + begin + QueryGroup := 
AddLayerAfter( TNNetPointwiseConvLinear.Create(InputChannelsPerGroup), PreviousLayer); + KeyGroup := AddLayerAfter( TNNetPointwiseConvLinear.Create(InputChannelsPerGroup), PreviousLayer); + ValueGroup := AddLayerAfter( TNNetPointwiseConvLinear.Create(InputChannelsPerGroup), PreviousLayer); + ValueTGroup := AddLayerAfter( TNNetTransposeXD.Create(), ValueGroup); + (*W := *)AddLayer( TNNetDotProducts.Create(QueryGroup, KeyGroup) ); + (*W := *)AddLayer( TNNetLayerMaxNormalization.Create() ); + W := AddLayer( TNNetPointwiseSoftMax.Create() ); + (*YT := *)AddLayer( TNNetDotProducts.Create(ValueTGroup, W) ); + EachGroupOutput[HeadCnt] := GetLastLayer(); + end; + AddLayer( TNNetDeepConcat.Create(EachGroupOutput) ); + SetLength(EachGroupOutput, 0); + // Groups with few channels tend to be numerically unstable + if InputChannelsPerGroup < 64 then + begin + AddLayer( TNNetMulByConstant.Create(InputChannelsPerGroup/64) ); + end; + Result := AddLayer( TNNetPointwiseConvLinear.Create(PreviousLayer.FOutput.Depth) ); + end; +end; + +// Ported code from: +// https://github.com/tgautam03/Transformers/blob/master/classification.ipynb +procedure TNNet.AddSingleHeadTransformerBlock( + out Result, W: TNNetLayer; + HasNorm: boolean = False); +var + PrevLayer, AttendedPlusPrev, Attended: TNNetLayer; + EmbeddingDim: integer; +begin + PrevLayer := GetLastLayer(); + EmbeddingDim := PrevLayer.Output.Depth; + AddSingleHeadSelfAttention(Attended, W); + AddLayer( TNNetSum.Create([Attended, PrevLayer]) ); + if HasNorm + then AttendedPlusPrev := AddLayer( TNNetMovingScale.Create() ) + else AttendedPlusPrev := GetLastLayer(); + AddLayer( TNNetPointwiseConvReLU.Create(EmbeddingDim*4) ); + AddLayer( TNNetPointwiseConvLinear.Create(EmbeddingDim) ); + if HasNorm then Result := AddLayer( TNNetMovingScale.Create() ); + AddLayer( TNNetSum.Create([ GetLastLayer(), AttendedPlusPrev]) ); + if HasNorm + then Result := AddLayer( TNNetMovingScale.Create() ) + else Result := GetLastLayer(); +end; + +function 
TNNet.AddTransformerBlock(Heads: integer; + IntermediateDim: integer; HasNorm: boolean = False + ): TNNetLayer; +var + PrevLayer, AttendedPlusPrev, Attended: TNNetLayer; + EmbeddingDim: integer; +begin + PrevLayer := GetLastLayer(); + EmbeddingDim := PrevLayer.Output.Depth; + Attended := AddSelfAttention(Heads); + AddLayer( TNNetSum.Create([Attended, PrevLayer]) ); + if HasNorm + then AttendedPlusPrev := AddLayer( TNNetMovingScale.Create() ) + else AttendedPlusPrev := GetLastLayer(); + AddLayer( TNNetPointwiseConvReLU.Create(IntermediateDim) ); + AddLayer( TNNetPointwiseConvLinear.Create(EmbeddingDim) ); + if HasNorm then AddLayer( TNNetMovingScale.Create() ); + AddLayer( TNNetSum.Create([ GetLastLayer(), AttendedPlusPrev]) ); + if HasNorm + then Result := AddLayer( TNNetMovingScale.Create() ) + else Result := GetLastLayer(); +end; + +function TNNet.AddTransformerBlockCAI(Heads: integer; + IntermediateDim: integer; + HasNorm: boolean = False + ): TNNetLayer; +var + PrevLayer, AttendedPlusPrev, Attended: TNNetLayer; + EmbeddingDim: integer; +begin + PrevLayer := GetLastLayer(); + EmbeddingDim := PrevLayer.Output.Depth; + Attended := AddSelfAttentionCAI(Heads); + AttendedPlusPrev := AddLayer( TNNetSum.Create([Attended, PrevLayer]) ); + AddLayer( TNNetPointwiseConvReLU.Create(IntermediateDim) ); + if HasNorm then AddLayer( TNNetMovingStdNormalization.create() ); + AddLayer( TNNetPointwiseConvLinear.Create(EmbeddingDim) ); + AddLayer( TNNetSum.Create([ GetLastLayer(), AttendedPlusPrev]) ); + Result := GetLastLayer(); +end; + { TNNetFullConnectLinear } procedure TNNetFullConnectLinear.ComputePreviousLayerErrorCPU(); @@ -6444,13 +7595,23 @@ destructor TNNetConcatBase.Destroy(); function TNNetConcatBase.SaveStructureToString(): string; var I: integer; + LayersStr: string; +//begin +// Result := inherited SaveStructureToString + ':'; +// for I := 0 to FPrevLayerList.Count - 1 do +// begin +// if I > 0 then Result := Result + ';'; +// Result := Result + 
IntToStr(FPrevLayerList[I].FLayerIdx); +// end; +//end; begin - Result := inherited SaveStructureToString + ':'; + LayersStr := ''; for I := 0 to FPrevLayerList.Count - 1 do begin - if I > 0 then Result := Result + ';'; - Result := Result + IntToStr(FPrevLayerList[I].FLayerIdx); + if I > 0 then LayersStr := LayersStr + ';'; + LayersStr := LayersStr + IntToStr(FPrevLayerList[I].FLayerIdx); end; + Result := StringReplace(inherited SaveStructureToString,'::',':'+LayersStr+':',[rfReplaceAll]); end; procedure TNNetConcatBase.BackpropagateConcat(); @@ -6574,14 +7735,25 @@ procedure TNNetSplitChannels.Backpropagate(); function TNNetSplitChannels.SaveStructureToString(): string; var I, MaxChannels: integer; + LayersStr: string; +//begin +// Result := inherited SaveStructureToString + ':'; +// MaxChannels := Length(FChannels) - 1; +// for I := 0 to MaxChannels do +// begin +// if I > 0 then Result := Result + ';'; +// Result := Result + IntToStr(FChannels[I]); +// end; +//end; begin - Result := inherited SaveStructureToString + ':'; + LayersStr := ''; MaxChannels := Length(FChannels) - 1; for I := 0 to MaxChannels do begin - if I > 0 then Result := Result + ';'; - Result := Result + IntToStr(FChannels[I]); + if I > 0 then LayersStr := LayersStr + ';'; + LayersStr := LayersStr + IntToStr(FChannels[I]); end; + Result := StringReplace(inherited SaveStructureToString,'::',':'+LayersStr+':',[rfReplaceAll]); end; procedure TestDataParallelism(NN: TNNet); @@ -7114,14 +8286,15 @@ constructor TNNetDataParallelism.Create(CloneNN: TNNet; pSize: integer; pFreeObj NN: TNNet; begin inherited Create(pFreeObjects); - NNData := CloneNN.SaveToString(); - + NNData := CloneNN.SaveStructureToString(); for I := 1 to pSize do begin NN := TNNet.Create; - NN.LoadFromString(NNData); + NN.LoadStructureFromString(NNData); + NN.CopyWeights(CloneNN); Self.Add(NN); end; + NNData := ''; end; constructor TNNetDataParallelism.Create(pSize: integer; pFreeObjects: Boolean); @@ -7863,8 +9036,12 @@ procedure 
TNNetIdentity.SetPrevLayer(pPrevLayer: TNNetLayer); end; procedure TNNetIdentity.Compute; +var + StartTime: double; begin + StartTime := Now(); FOutput.CopyNoChecks(FPrevLayer.FOutput); + FForwardTime := FForwardTime + (Now() - StartTime); end; procedure TNNetIdentity.Backpropagate; @@ -8196,10 +9373,14 @@ constructor TNNetFullConnectReLU.Create(pSize: integer; pSuppressBias: integer = end; { TNNetSoftMax } -procedure TNNetSoftMax.Compute(); +procedure TNNetSoftMax.Compute; +var + StartTime: double; begin - inherited Compute(); + StartTime := Now(); + FOutput.CopyNoChecks(FPrevLayer.FOutput); FSoftTotalSum := FOutput.SoftMax(); + FForwardTime := FForwardTime + (Now() - StartTime); end; { TNNetConvolutionReLU } @@ -8361,7 +9542,7 @@ procedure TNNetMaxPool.ComputeWithStride(); begin OutputMaxX := Output.SizeX - 1; OutputMaxY := Output.SizeY - 1; - MaxD := Output.Depth - 1; + MaxD := FPrevLayer.Output.Depth - 1; LocalPoolSizeM1 := FPoolSize - 1; InputSizeXM1 := FInputCopy.SizeX - 1; InputSizeYM1 := FInputCopy.SizeY - 1; @@ -8430,7 +9611,7 @@ procedure TNNetPoolBase.BackpropagateDefaultStride(); begin MaxX := Output.SizeX - 1; MaxY := Output.SizeY - 1; - MaxD := Output.Depth - 1; + MaxD := FPrevLayer.Output.Depth - 1; //Although the below line makes all the sense, it might brake compatibility //with existing code. //if FStride > 1 then FOutputError.Mul( Min(FStride, 4) ); @@ -8464,7 +9645,7 @@ procedure TNNetPoolBase.BackpropagateWithStride(); begin MaxX := Output.SizeX - 1; MaxY := Output.SizeY - 1; - MaxD := Output.Depth - 1; + MaxD := FPrevLayer.Output.Depth - 1; //Although the below line makes all the sense, it might brake compatibility //with existing code. 
//if FStride > 1 then FOutputError.Mul( Min(FStride, 4) ); @@ -8524,7 +9705,7 @@ procedure TNNetConvolutionBase.SetPrevLayer(pPrevLayer: TNNetLayer); FInputPrepared.Resize(FOutputSizeX, FOutputSizeY, FVectorSize); end; RefreshNeuronWeightList(); - if ShouldUseInterleavedDotProduct then + if ShouldUseInterleavedDotProduct (*or FPointwise*) then begin FShouldConcatWeights := true; FShouldInterleaveWeights := true; @@ -8927,7 +10108,7 @@ procedure TNNetConvolution.Backpropagate(); //BackpropagateFastCPUDev(); //BackpropagateFastCPU(); - BackpropagateFastTiledCPU(); + BackpropagateFastTiledCPU(); // This is our default backprop //BackpropagateCPU(); {$IFDEF CheckRange}ForceRangeWeights(1000);{$ENDIF} @@ -9378,6 +10559,14 @@ procedure TNNetConvolution.BackpropagateFastTiledCPU(); end; end; + (* + if (FPointwise and FCalculatePrevLayerError) then + begin + FPrevLayerErrorPadded.DotProductsPointwise(FConcatedWInter, FOutputErrorDeriv); + LocalPrevError.Add(FPrevLayerErrorPadded); + end; + *) + if FPadding > 0 then begin FPrevLayer.OutputError.AddArea(0, 0, FPadding, FPadding, FPrevLayer.OutputError.SizeX, FPrevLayer.OutputError.SizeY, FPrevLayerErrorPadded); @@ -9402,28 +10591,31 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); LocalOutputErrorDeriv: TNeuralFloat; SmoothLocalOutputErrorDeriv: TNeuralFloat; LocalWeight, LocalPrevError: TNNetVolume; - {SrcPtr,} LocalDestPtr: TNeuralFloatArrPtr; + //{SrcPtr,} LocalDestPtr: TNeuralFloatArrPtr; SmoothLocalOutputErrorDerivPtr: pointer; PrevNumElements, PrevMissedElements: integer; - {$IFDEF AVX64}PtrNeuronDelta : TNeuralFloatArrPtr; {$ENDIF} + PtrNeuronDelta, PtrPreparedInput: TNeuralFloatArrPtr; PrevPtrA, PrevPtrB: TNeuralFloatArrPtr; NeuronWeights: integer; LocalLearningErrorDerivPtr: pointer; localNumElements, MissedElements: integer; MaxPrevX, MaxPrevY: integer; - InterErrorDeriv, InterInput: TNNetVolume; - NeuronCnt, NeuronPosCnt: integer; - LocalDelta: TNNetVolume; begin - InterErrorDeriv := TNNetVolume.Create(); 
- InterInput := TNNetVolume.Create(); MaxX := OutputError.SizeX - 1; MaxY := OutputError.SizeY - 1; MaxD := OutputError.Depth - 1; - LocalDestPtr := nil; - MaxPrevX := 1 + FPrevLayer.FOutputError.SizeX - FFeatureSizeX; - MaxPrevY := 1 + FPrevLayer.FOutputError.SizeY - FFeatureSizeY; - LocalPrevError := FPrevLayer.OutputError; + //LocalDestPtr := nil; + if FPadding > 0 then + begin + FPrevLayerErrorPadded.Fill(0); + LocalPrevError := FPrevLayerErrorPadded; + end + else + begin + LocalPrevError := FPrevLayer.OutputError; + end; + MaxPrevX := 1 + LocalPrevError.SizeX - FFeatureSizeX; + MaxPrevY := 1 + LocalPrevError.SizeY - FFeatureSizeY; PrevNumElements := (FSizeXDepth div 4) * 4; PrevMissedElements := FSizeXDepth - PrevNumElements; NeuronWeights := FArrNeurons[0].Delta.Size; @@ -9434,15 +10626,17 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); begin for OutputY := 0 to MaxY do begin - PrevY := (OutputY*FStride)-FPadding; + PrevY := (OutputY*FStride); for OutputX := 0 to MaxX do begin - PrevX := (OutputX*FStride)-FPadding; + PrevX := (OutputX*FStride); OutputRawPos := FOutputErrorDeriv.GetRawPos(OutputX, OutputY); - if (FCalculatePrevLayerError) then LocalDestPtr := LocalPrevError.GetRawPtr(OutputX, OutputY); - //PtrPreparedInput := FInputPrepared.GetRawPtr(OutputX, OutputY); + //TODO: the next line is probably wrong. 
+ // this is actually never used afterwards -> I comment it out + //if (FCalculatePrevLayerError) then LocalDestPtr := LocalPrevError.GetRawPtr(OutputX, OutputY); + PtrPreparedInput := FInputPrepared.GetRawPtr(OutputX, OutputY); CanBackpropOnPos := - (PrevX >= 0) and (PrevY >= 0) and + //(PrevX >= 0) and (PrevY >= 0) and (PrevX < MaxPrevX) and (PrevY < MaxPrevY); for OutputD := 0 to MaxD do @@ -9450,7 +10644,7 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); {$IFDEF FPC} if FActivationFn = @RectifiedLinearUnit then begin - if FOutput.FData[OutputRawPos] > 0 then + if FOutputRaw.FData[OutputRawPos] >= 0 then begin LocalOutputErrorDeriv := FOutputError.FData[OutputRawPos]; end @@ -9479,7 +10673,17 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); LocalLearningErrorDeriv := (-FLearningRate) * LocalOutputErrorDeriv; if (LocalLearningErrorDeriv <> 0.0) then begin - //FArrNeurons[OutputD].Delta.MulAdd(LocalLearningErrorDeriv, PtrPreparedInput); + {$IFNDEF AVX64} + FArrNeurons[OutputD].Delta.MulAdd(LocalLearningErrorDeriv, PtrPreparedInput); + {$ELSE} + {$IFDEF Debug} + if localNumElements + MissedElements <> FArrNeurons[OutputD].Delta.Size + then FErrorProc('Error at TNNetConvolution.BackpropagateFastCPU(): neuron size doesn''t match.'); + {$ENDIF} + PtrNeuronDelta := FArrNeurons[OutputD].Delta.DataPtr; + asm_avx64_train_neuron + {$ENDIF} + {$IFDEF FPC} FArrNeurons[OutputD].FBiasDelta += LocalLearningErrorDeriv; {$ELSE} @@ -9487,10 +10691,10 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); FArrNeurons[OutputD].FBiasDelta + LocalLearningErrorDeriv; {$ENDIF} - if (FCalculatePrevLayerError) then + if (FCalculatePrevLayerError and not(FPointwise)) then begin LocalWeight := FArrNeurons[OutputD].Weights; - if FPointwise then + (*if FPointwise then begin {$IFNDEF AVX64} LocalPrevError.MulAdd(LocalDestPtr, LocalWeight.DataPtr, LocalOutputErrorDeriv, FInputCopy.Depth); @@ -9505,7 +10709,7 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); 
asm_avx64_prev_backprop; {$ENDIF} end - else + else *) begin if CanBackpropOnPos then begin @@ -9552,6 +10756,13 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); end; end; + if (FPointwise and FCalculatePrevLayerError) then + begin + FPrevLayerErrorPadded.DotProductsPointwise(FConcatedWInter, FOutputErrorDeriv); + LocalPrevError.Add(FPrevLayerErrorPadded); + end; + + (* FOutputErrorDeriv.Mul(-FLearningRate); InterErrorDeriv.InterleaveWithDepthFrom(FOutputErrorDeriv, FOutputErrorDeriv.SizeX * FOutputErrorDeriv.SizeY); InterInput.InterleaveWithDepthFrom(FInputPrepared, FInputPrepared.SizeX * FInputPrepared.SizeY); @@ -9573,15 +10784,18 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); ); end; end; + *) + + if FPadding > 0 then + begin + FPrevLayer.OutputError.AddArea(0, 0, FPadding, FPadding, FPrevLayer.OutputError.SizeX, FPrevLayer.OutputError.SizeY, FPrevLayerErrorPadded); + end; if (not FBatchUpdate) then begin for OutputD := 0 to MaxD do FArrNeurons[OutputD].UpdateWeights(FInertia); AfterWeightUpdate(); end; - - InterErrorDeriv.Free; - InterInput.Free; end; constructor TNNetConvolutionAbstract.Create(pFeatureSize, pInputPadding, pStride: integer; pSuppressBias: integer = 0); @@ -9595,8 +10809,8 @@ constructor TNNetConvolutionAbstract.Create(pFeatureSize, pInputPadding, pStride if FPadding > 0 then begin FInputCopy := TNNetVolume.Create; - FPrevLayerErrorPadded := TNNetVolume.Create; end; + FPrevLayerErrorPadded := TNNetVolume.Create; end; destructor TNNetConvolutionAbstract.Destroy(); @@ -9604,8 +10818,8 @@ destructor TNNetConvolutionAbstract.Destroy(); if FPadding > 0 then begin FInputCopy.Free; - FPrevLayerErrorPadded.Free; end; + FPrevLayerErrorPadded.Free; inherited Destroy(); end; @@ -10080,6 +11294,245 @@ function TNNetInput.DisableErrorCollection: TNNetInput; Result := Self; end; +{ TNNetEmbedding } + +procedure TNNetEmbedding.SetPrevLayer(pPrevLayer: TNNetLayer); +begin + inherited SetPrevLayer(pPrevLayer); + 
FOutput.ReSize(pPrevLayer.Output.Size, 1, FEmbeddingSize); + FOutputError.ReSize(FOutput); + SetLength(FInputTokens, pPrevLayer.Output.Size); +end; + +constructor TNNetEmbedding.Create(pVocabSize, pEmbeddingSize: integer; + EncodeZero: integer = 0; ScaleEmbedding: TNeuralFloat = 2); +begin + inherited Create(); + FVocabSize := pVocabSize; + FEmbeddingSize := pEmbeddingSize; + FEncodeZero := (EncodeZero>0); + FScaleEmbedding := ScaleEmbedding; + FStruct[0] := pVocabSize; + FStruct[1] := pEmbeddingSize; + FStruct[2] := EncodeZero; + FFloatSt[0] := ScaleEmbedding; + if FNeurons.Count < 1 then AddMissingNeurons(1); + SetNumWeightsForAllNeurons(pVocabSize, 1, pEmbeddingSize); + InitDefault(); + AfterWeightUpdate(); +end; + +destructor TNNetEmbedding.Destroy; +begin + SetLength(FInputTokens, 0); + inherited Destroy; +end; + +procedure TNNetEmbedding.InitDefault; +begin + InitUniform(FScaleEmbedding); +end; + +procedure TNNetEmbedding.Compute(); +var + MaxToken, CntToken, CurrentToken: integer; + SourcePtr, DestPtr: TNeuralFloatArrPtr; + LocalWeights: TNNetVolume; + StartTime: double; +begin + StartTime := Now(); + MaxToken := FPrevLayer.Output.Size - 1; + LocalWeights := FNeurons[0].Weights; + FOutput.Fill(0); + {$IFDEF Debug} + if FEmbeddingSize=0 then + begin + FErrorProc('Embedding size can not be zero.'); + end; + if LocalWeights.Size <> FEmbeddingSize * FVocabSize then + begin + FErrorProc('Weights size do not match at TNNetEmbedding:' + + IntToStr(LocalWeights.Size)+' EmbeddingSize * Vocab Size:'+ + IntToStr(FEmbeddingSize * FVocabSize)+' '+ + ' EmbeddingSize: '+IntToStr(FEmbeddingSize)+ + ' Vocab Size: '+IntToStr(FVocabSize) + ); + end; + {$ENDIF} + for CntToken := 0 to MaxToken do + begin + CurrentToken := Round(FPrevLayer.Output.FData[CntToken]); + if CurrentToken >= FVocabSize then + begin + FErrorProc('Token is bigger than vocab size:'+ IntToStr(CurrentToken)); + CurrentToken := 0; + end; + if FEncodeZero or (CurrentToken>0) then + begin + 
FInputTokens[CntToken] := CurrentToken; + SourcePtr := LocalWeights.GetRawPtr(CurrentToken, 0, 0); + DestPtr := FOutput.GetRawPtr(CntToken, 0, 0); + // TODO: replace this call by a copy function. + TNNetVolume.MulAdd(DestPtr, SourcePtr, 1, FEmbeddingSize); + end; + end; + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +constructor TNNetEmbedding.Create; +begin + inherited; + + FVocabSize := 1; + FEmbeddingSize := 1; + FEncodeZero := False; + FScaleEmbedding := 2; + FStruct[0] := FVocabSize; + FStruct[1] := FEmbeddingSize; + FStruct[2] := 0; + FFloatSt[0] := 2; + if FNeurons.Count < 1 then AddMissingNeurons(1); + SetNumWeightsForAllNeurons(FVocabSize, 1, FEmbeddingSize); + InitDefault(); + AfterWeightUpdate(); +end; + +procedure TNNetEmbedding.Backpropagate(); +var + MaxToken, CntToken, CurrentToken: integer; + SourcePtr, DestPtr: TNeuralFloatArrPtr; + LocalWeights, LocalDelta: TNNetVolume; + StartTime: double; +begin + LocalWeights := FNeurons[0].Weights; + LocalDelta := FNeurons[0].Delta; + {$IFDEF Debug} + if FEmbeddingSize=0 then + begin + FErrorProc('Embedding size can not be zero.'); + end; + if FBackPropCallCurrentCnt > FDepartingBranchesCnt then + begin + FErrorProc('Backprop call count does not look right at TNNetEmbedding: '+IntToStr(FBackPropCallCurrentCnt)+' '+IntToStr(FDepartingBranchesCnt)); + end; + if LocalWeights.Size <> FEmbeddingSize * FVocabSize then + begin + FErrorProc('Weights size do not match at TNNetEmbedding:' + + IntToStr(LocalWeights.Size)+' EmbeddingSize * Vocab Size:'+ + IntToStr(FEmbeddingSize * FVocabSize)+' '+ + ' EmbeddingSize: '+IntToStr(FEmbeddingSize)+ + ' Vocab Size: '+IntToStr(FVocabSize) + ); + end; + if LocalDelta.Size <> LocalWeights.Size then + begin + FErrorProc('Weights size and Delta Size do not match at TNNetEmbedding.' 
+ + ' Weights Size: '+IntToStr(LocalWeights.Size)+ + ' Delta Size: '+IntToStr(LocalDelta.Size) + ); + end; + //WriteLn( LocalWeights.GetSum() ); + {$ENDIF} + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + StartTime := Now(); + MaxToken := FPrevLayer.Output.Size - 1; + for CntToken := 0 to MaxToken do + begin + CurrentToken := FInputTokens[CntToken]; + if FEncodeZero or (CurrentToken>0) then + begin + SourcePtr := FOutputError.GetRawPtr(CntToken); + if FBatchUpdate + then DestPtr := LocalDelta.GetRawPtr(CurrentToken, 0, 0) + else DestPtr := LocalWeights.GetRawPtr(CurrentToken, 0, 0); + TNNetVolume.MulAdd(DestPtr, SourcePtr, FLearningRate, FEmbeddingSize); + end; + end; + FBackwardTime := FBackwardTime + (Now() - StartTime); +end; + +{ TNNetTokenAndPositionalEmbedding } + +procedure TNNetTokenAndPositionalEmbedding.SetPrevLayer(pPrevLayer: TNNetLayer); +begin + inherited SetPrevLayer(pPrevLayer); + FPositionalEmbedding.ReSize(FOutput); + FPositionalEmbedding.PositionalEncoding(FEmbeddingSize); + if FScalePositional<>1 then FPositionalEmbedding.Mul(FScalePositional); +end; + +constructor TNNetTokenAndPositionalEmbedding.Create(pVocabSize, + pEmbeddingSize: integer; + EncodeZero: integer = 0; + ScaleEmbedding: TNeuralFloat = 2; + ScalePositional: TNeuralFloat = 1; + PositionalEmbeddingN: integer = 0); +begin + inherited Create(pVocabSize, pEmbeddingSize, EncodeZero, ScaleEmbedding); + FPositionalEmbedding := TNNetVolume.Create; + if PositionalEmbeddingN=0 + then FPositionalEmbeddingN := 10000 + else FPositionalEmbeddingN := PositionalEmbeddingN; + FScalePositional := ScalePositional; + FStruct[3] := FPositionalEmbeddingN; + FFloatSt[1] := FScalePositional; +end; + +destructor TNNetTokenAndPositionalEmbedding.Destroy; +begin + FPositionalEmbedding.Free; + inherited Destroy; +end; + +procedure TNNetTokenAndPositionalEmbedding.Compute(); +var + MaxToken, CntToken, CurrentToken: integer; + SourcePtr, SourcePtrPos, DestPtr: 
TNeuralFloatArrPtr; + LocalWeights: TNNetVolume; + StartTime: double; +begin + StartTime := Now(); + MaxToken := FPrevLayer.Output.Size - 1; + LocalWeights := FNeurons[0].Weights; + FOutput.Fill(0); + {$IFDEF Debug} + if FEmbeddingSize=0 then + begin + FErrorProc('Embedding size can not be zero.'); + end; + if LocalWeights.Size <> FEmbeddingSize * FVocabSize then + begin + FErrorProc('Weights size do not match at TNNetEmbedding:' + + IntToStr(LocalWeights.Size)+' EmbeddingSize * Vocab Size:'+ + IntToStr(FEmbeddingSize * FVocabSize)+' '+ + ' EmbeddingSize: '+IntToStr(FEmbeddingSize)+ + ' Vocab Size: '+IntToStr(FVocabSize) + ); + end; + {$ENDIF} + for CntToken := 0 to MaxToken do + begin + CurrentToken := Round(FPrevLayer.Output.FData[CntToken]); + if CurrentToken >= FVocabSize then + begin + FErrorProc('Token is bigger than vocab size:'+ IntToStr(CurrentToken)); + CurrentToken := 0; + end; + if FEncodeZero or (CurrentToken>0) then + begin + FInputTokens[CntToken] := CurrentToken; + SourcePtr := LocalWeights.GetRawPtr(CurrentToken, 0, 0); + SourcePtrPos := FPositionalEmbedding.GetRawPtr(CntToken, 0, 0); + DestPtr := FOutput.GetRawPtr(CntToken, 0, 0); + // TODO: replace this call by a copy function. 
+ TNNetVolume.MulAdd(DestPtr, SourcePtr, 1, FEmbeddingSize); + TNNetVolume.MulAdd(DestPtr, SourcePtrPos, 1, FEmbeddingSize); + end; + end; + FForwardTime := FForwardTime + (Now() - StartTime); +end; + procedure TNNetInputBase.Compute; begin FOutputError.Fill(0); @@ -10186,25 +11639,38 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; var S, S2: TStringList; St: array [0..csNNetMaxParameterIdx] of integer; + Ft: array [0..csNNetMaxParameterIdx] of TNeuralFloat; aL: array of TNNetLayer; aIdx: TNeuralIntegerArray; IdxCnt: integer; I: integer; + ClassNameStr: string; + SCount: integer; + fmt : TFormatSettings; begin Result := nil; S := CreateTokenizedStringList(strData,':'); S2 := CreateTokenizedStringList(strData,';'); - if S.Count >= 2 then + fmt := GetDefaultNumericFormat; + SCount := S.Count; + + if SCount >= 2 then begin + ClassNameStr := S[0]; + // This code is good for debug + // if ClassNameStr = 'TNNetSum' then + // begin + // WriteLn('hello'); + // end; + for I := Low(St) to High(St) do St[i] := 0; S2.DelimitedText := S[1]; if S2.Count > 0 then begin for I := 0 to Min(S2.Count - 1, High(St)) do St[I] := StrToInt(S2[I]); end; - - if S.Count = 3 then + if SCount >= 3 then begin S2.DelimitedText := S[2]; @@ -10225,13 +11691,33 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; end; end; - + if SCount >= 4 then + begin + for I := Low(Ft) to High(Ft) do Ft[i] := 0; + S2.DelimitedText := S[3]; + if S2.Count > 0 then + begin + for I := 0 to Min(S2.Count - 1, High(St)) do Ft[I] := StrToFloat(S2[I], fmt); + end; + end + else + // backward compatibility + begin + for I := Low(Ft) to High(Ft) do Ft[i] := St[i]; + end; + if S[0] = 'TNNetInput' then Result := TNNetInput.Create(St[0], St[1], St[2], St[3]) else if S[0] = 'TNNetIdentity' then Result := TNNetIdentity.Create() else + if S[0] = 'TNNetTransposeXD' then Result := TNNetTransposeXD.Create() else + if S[0] = 'TNNetTransposeYD' then Result := TNNetTransposeYD.Create() else if S[0] = 'TNNetDebug' 
then Result := TNNetDebug.Create(St[0], St[1]) else + if S[0] = 'TNNetDotProducts' then Result := TNNetDotProducts.Create(St[0], St[1]) else if S[0] = 'TNNetPad' then Result := TNNetPad.Create(St[0]) else + if S[0] = 'TNNetPadXY' then Result := TNNetPadXY.Create(St[0], St[1]) else + if S[0] = 'TNNetCrop' then Result := TNNetCrop.Create(St[0], St[1], St[2], St[3]) else if S[0] = 'TNNetIdentityWithoutBackprop' then Result := TNNetIdentityWithoutBackprop.Create() else if S[0] = 'TNNetReLU' then Result := TNNetReLU.Create() else + if S[0] = 'TNNetReLUP' then Result := TNNetReLUP.Create() else if S[0] = 'TNNetSwish' then Result := TNNetSwish.Create() else if S[0] = 'TNNetHardSwish' then Result := TNNetHardSwish.Create() else if S[0] = 'TNNetSwish6' then Result := TNNetSwish6.Create() else @@ -10256,18 +11742,17 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; if S[0] = 'TNNetLocalConnect' then Result := TNNetLocalConnect.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetLocalProduct' then Result := TNNetLocalProduct.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetLocalConnectReLU' then Result := TNNetLocalConnectReLU.Create(St[0], St[1], St[2], St[3], St[4]) else - if S[0] = 'TNNetMulLearning' then Result := TNNetMulLearning.Create(St[0]) else - if S[0] = 'TNNetMulByConstant' then Result := TNNetMulByConstant.Create(St[0]) else + if S[0] = 'TNNetMulLearning' then Result := TNNetMulLearning.Create(Ft[0]) else + if S[0] = 'TNNetMulByConstant' then Result := TNNetMulByConstant.Create(Ft[0]) else if S[0] = 'TNNetNegate' then Result := TNNetNegate.Create() else if S[0] = 'TNNetLayerSoftMax' then Result := TNNetSoftMax.Create() else - if S[0] = 'TNNetSoftMax' then Result := TNNetSoftMax.Create() else + if S[0] = 'TNNetSoftMax' then Result := TNNetSoftMax.Create(St[0]) else + if S[0] = 'TNNetPointwiseSoftMax' then Result := TNNetPointwiseSoftMax.Create(St[0]) else if S[0] = 'TNNetConvolution' then Result := TNNetConvolution.Create(St[0], 
St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetConvolutionReLU' then Result := TNNetConvolutionReLU.Create(St[0], St[1], St[2], St[3], St[4]) else - if S[0] = 'TNNetConvolutionGeLU' then Result := TNNetConvolutionGeLU.Create(St[0], St[1], St[2], St[3], St[4]) else - if S[0] = 'TNNetConvolutionSwish6' then Result := TNNetConvolutionSwish6.Create(St[0], St[1], St[2], St[3], St[4]) else + if S[0] = 'TNNetConvolutionLinear' then Result := TNNetConvolutionLinear.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetConvolutionSwish' then Result := TNNetConvolutionSwish.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetConvolutionHardSwish' then Result := TNNetConvolutionHardSwish.Create(St[0], St[1], St[2], St[3], St[4]) else - if S[0] = 'TNNetConvolutionLinear' then Result := TNNetConvolutionLinear.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetGroupedConvolutionLinear' then Result := TNNetGroupedConvolutionLinear.Create(St[0], St[1], St[2], St[3], St[5], St[4]) else if S[0] = 'TNNetGroupedConvolutionReLU' then Result := TNNetGroupedConvolutionReLU.Create(St[0], St[1], St[2], St[3], St[5], St[4]) else if S[0] = 'TNNetGroupedPointwiseConvLinear' then Result := TNNetGroupedPointwiseConvLinear.Create({pNumFeatures=}St[0], {pGroups=}St[5], {pSuppressBias=}St[4]) else @@ -10276,12 +11761,12 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; if S[0] = 'TNNetConvolutionSharedWeights' then Result := TNNetConvolutionSharedWeights.Create(FLayers[St[5]]) else if S[0] = 'TNNetDepthwiseConv' then Result := TNNetDepthwiseConv.Create(St[0], St[1], St[2], St[3]) else if S[0] = 'TNNetDepthwiseConvReLU' then Result := TNNetDepthwiseConvReLU.Create(St[0], St[1], St[2], St[3]) else - if S[0] = 'TNNetDepthwiseConvGeLU' then Result := TNNetDepthwiseConvGeLU.Create(St[0], St[1], St[2], St[3]) else if S[0] = 'TNNetDepthwiseConvLinear' then Result := TNNetDepthwiseConvLinear.Create(St[0], St[1], St[2], St[3]) else if S[0] = 'TNNetPointwiseConv' 
then Result := TNNetPointwiseConv.Create(St[0], St[4]) else if S[0] = 'TNNetPointwiseConvReLU' then Result := TNNetPointwiseConvReLU.Create(St[0], St[4]) else if S[0] = 'TNNetPointwiseConvLinear' then Result := TNNetPointwiseConvLinear.Create(St[0], St[4]) else if S[0] = 'TNNetMaxPool' then Result := TNNetMaxPool.Create(St[0], St[1], St[2]) else + if S[0] = 'TNNetMaxPoolWithPosition' then Result := TNNetMaxPoolWithPosition.Create(St[0], St[1], St[2], St[3], St[4], St[5]) else if S[0] = 'TNNetMaxPoolPortable' then Result := TNNetMaxPoolPortable.Create(St[0], St[1], St[2]) else if S[0] = 'TNNetMinPool' then Result := TNNetMinPool.Create(St[0], St[1], St[2]) else if S[0] = 'TNNetAvgPool' then Result := TNNetAvgPool.Create(St[0]) else @@ -10304,6 +11789,7 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; if S[0] = 'TNNetLayerMaxNormalization' then Result := TNNetLayerMaxNormalization.Create() else if S[0] = 'TNNetLayerStdNormalization' then Result := TNNetLayerStdNormalization.Create() else if S[0] = 'TNNetMovingStdNormalization' then Result := TNNetMovingStdNormalization.Create() else + if S[0] = 'TNNetMovingScale' then Result := TNNetMovingScale.Create(Ft[0],Ft[1]) else if S[0] = 'TNNetChannelStdNormalization' then Result := TNNetChannelStdNormalization.Create() else if S[0] = 'TNNetScaleLearning' then Result := TNNetChannelStdNormalization.Create() else if S[0] = 'TNNetChannelBias' then Result := TNNetChannelBias.Create() else @@ -10318,6 +11804,9 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; if S[0] = 'TNNetLocalResponseNorm2D' then Result := TNNetLocalResponseNorm2D.Create(St[0]) else if S[0] = 'TNNetLocalResponseNormDepth' then Result := TNNetLocalResponseNormDepth.Create(St[0]) else if S[0] = 'TNNetAddAndDiv' then Result := TNNetAddAndDiv.Create(St[0], St[1]) else + if S[0] = 'TNNetAddPositionalEmbedding' then Result := TNNetAddPositionalEmbedding.Create(St[0]) else + if S[0] = 'TNNetEmbedding' then Result := TNNetEmbedding.Create(St[0], 
St[1], St[2], Ft[0]) else + if S[0] = 'TNNetTokenAndPositionalEmbedding' then Result := TNNetTokenAndPositionalEmbedding.Create(St[0], St[1], St[2], Ft[0], Ft[1], St[3]) else raise Exception.create(strData + ' not allowed in CreateLayer.'); end else @@ -10461,6 +11950,65 @@ function TNNet.AddGroupedConvolution(Conv2d: TNNetConvolutionClass; SetLength(EachGroupOutput, 0); end; +function TNNet.AddGroupedDotProducts(A, B: TNNetLayer; Groups: integer; ChannelInterleaving: boolean): TNNetLayer; +var + PreviousLayer: TNNetLayer; + InputChannelsPerGroup: integer; + EachGroupOutput: array of TNNetLayer; + GroupCnt: integer; +begin + PreviousLayer := GetLastLayer(); + Result := PreviousLayer; + SetLength(EachGroupOutput, Groups); + InputChannelsPerGroup := PreviousLayer.FOutput.Depth div Groups; + if Groups = 1 then + begin + Result := AddLayer( TNNetDotProducts.Create(A, B) ); + end; + if Groups > 1 then + begin + for GroupCnt := 0 to Groups - 1 do + begin + if ChannelInterleaving + then AddLayerAfter( TNNetSplitChannelEvery.Create(Groups, GroupCnt), PreviousLayer) + else AddLayerAfter( TNNetSplitChannels.Create(GroupCnt*InputChannelsPerGroup, InputChannelsPerGroup), PreviousLayer); + EachGroupOutput[GroupCnt] := AddLayer( TNNetDotProducts.Create(A, B) ); + end; + Result := AddLayer( TNNetDeepConcat.Create(EachGroupOutput) ); + end; + SetLength(EachGroupOutput, 0); +end; + +function TNNet.AddGroupedPointwiseSoftMax(Groups: integer; + ChannelInterleaving: boolean): TNNetLayer; +var + PreviousLayer: TNNetLayer; + InputChannelsPerGroup: integer; + EachGroupOutput: array of TNNetLayer; + GroupCnt: integer; +begin + PreviousLayer := GetLastLayer(); + Result := PreviousLayer; + SetLength(EachGroupOutput, Groups); + InputChannelsPerGroup := PreviousLayer.FOutput.Depth div Groups; + if Groups = 1 then + begin + Result := AddLayer( TNNetPointwiseSoftMax.Create() ); + end; + if Groups > 1 then + begin + for GroupCnt := 0 to Groups - 1 do + begin + if ChannelInterleaving + then 
AddLayerAfter( TNNetSplitChannelEvery.Create(Groups, GroupCnt), PreviousLayer) + else AddLayerAfter( TNNetSplitChannels.Create(GroupCnt*InputChannelsPerGroup, InputChannelsPerGroup), PreviousLayer); + EachGroupOutput[GroupCnt] := AddLayer( TNNetPointwiseSoftMax.Create() ); + end; + Result := AddLayer( TNNetDeepConcat.Create(EachGroupOutput) ); + end; + SetLength(EachGroupOutput, 0); +end; + function TNNet.AddAutoGroupedPointwiseConv( Conv2d: TNNetGroupedPointwiseConvClass; MinChannelsPerGroupCount, pNumFeatures: integer; @@ -10860,6 +12408,11 @@ procedure TNNet.AddToWeightAverage(NewElement: TNNet; CurrentElementCount: integ MulMulAddWeights(CurrentElementCount/(CurrentElementCount+1), 1/(CurrentElementCount+1), NewElement); end; +function TNNet.GetFirstLayer: TNNetLayer; +begin + Result := FLayers[0]; +end; + function TNNet.AddLayerAfter(pLayer, pAfterLayer: TNNetLayer): TNNetLayer; begin if Assigned(pAfterLayer) then @@ -11484,6 +13037,84 @@ function TNNet.NormalizeMaxAbsoluteDelta(NewMax: TNeuralFloat): TNeuralFloat; end; end; +function TNNet.NormalizeMinAbsoluteDeltaPerLayer(MinDelta: TNeuralFloat + ): TNeuralFloat; +var + LayerCnt, LastLayerIdx: integer; + MaxAbsDelta: TNeuralFloat; +begin + LastLayerIdx := GetLastLayerIdx(); + Result := 1; + if FLayers.Count > 0 then + begin + for LayerCnt := 0 to LastLayerIdx do + begin + if not(FLayers[LayerCnt].LinkedNeurons) then + begin + MaxAbsDelta := FLayers[LayerCnt].GetMaxAbsoluteDelta(); + if (MaxAbsDelta < MinDelta) and (MaxAbsDelta > 0) then + begin + FLayers[LayerCnt].MulDeltas( MinDelta/MaxAbsDelta ); + Result := Max(Result, MinDelta/MaxAbsDelta ); + FMaxDeltaLayer := LayerCnt; + end; + end; + end; + end; +end; + +function TNNet.NormalizeMinMaxAbsoluteDeltaPerLayer(MinDelta, + MaxDelta: TNeuralFloat): TNeuralFloat; +var + LayerCnt, LastLayerIdx: integer; + MaxAbsDelta: TNeuralFloat; +begin + LastLayerIdx := GetLastLayerIdx(); + Result := 1; + if FLayers.Count > 0 then + begin + for LayerCnt := 0 to LastLayerIdx 
do + begin + if not(FLayers[LayerCnt].LinkedNeurons) then + begin + MaxAbsDelta := FLayers[LayerCnt].GetMaxAbsoluteDelta(); + if (MaxAbsDelta > 0) then + begin + if (MaxAbsDelta < MinDelta) then + begin + FLayers[LayerCnt].MulDeltas( MinDelta/MaxAbsDelta ); + //WriteLn(LayerCnt, ' Force Min:', (MinDelta/MaxAbsDelta):8:4); + end + else if (MaxAbsDelta > MaxDelta) then + begin + FLayers[LayerCnt].MulDeltas( MaxDelta/MaxAbsDelta ); + Result := Min(Result, MaxDelta/MaxAbsDelta ); + FMaxDeltaLayer := LayerCnt; + //WriteLn(LayerCnt, ' Force Max:', (MaxDelta/MaxAbsDelta):8:4); + end; + end; + end; + end; + end; +end; + +procedure TNNet.NormalizeMaxAbsoluteDeltaPerNeuron(MaxDelta: TNeuralFloat); +var + LayerCnt, LastLayerIdx: integer; +begin + LastLayerIdx := GetLastLayerIdx(); + if FLayers.Count > 0 then + begin + for LayerCnt := 0 to LastLayerIdx do + begin + if not(FLayers[LayerCnt].LinkedNeurons) then + begin + FLayers[LayerCnt].NormalizeMaxAbsoluteDeltaPerNeuron(MaxDelta); + end; + end; + end; +end; + procedure TNNet.ClearInertia(); var LayerCnt: integer; @@ -11631,6 +13262,26 @@ procedure TNNet.UpdateWeights(); end; end; +procedure TNNet.CalcAdamDelta(); +var + LayerCnt: integer; +begin + for LayerCnt := 0 to GetLastLayerIdx() do + begin + FLayers[LayerCnt].CalcAdamDelta(); + end; +end; + +procedure TNNet.UpdateWeightsAdam(); +var + LayerCnt: integer; +begin + for LayerCnt := 0 to GetLastLayerIdx() do + begin + FLayers[LayerCnt].UpdateWeightsAdam(); + end; +end; + procedure TNNet.ClearDeltas(); var LayerCnt: integer; @@ -11801,6 +13452,11 @@ procedure TNNet.DebugStructure(); if FLayers.Count > 1 then begin + WriteLn( + 'Has AVX: ', FLayers[0].Output.HasAVX, + ' Has AVX2: ', FLayers[0].Output.HasAVX2, + ' Has AVX512: ', FLayers[0].Output.HasAVX512 + ); for LayerCnt := 0 to GetLastLayerIdx() do begin WeightCount := FLayers[LayerCnt].CountWeights(); @@ -11824,6 +13480,10 @@ procedure TNNet.DebugStructure(); ' Weight Sum:', FLayers[LayerCnt].GetWeightSum():8:4, ' Bias 
Sum:', FLayers[LayerCnt].GetBiasSum():8:4 ); + {$IFDEF OpenCL} + if FLayers[LayerCnt].HasOpenCL then write(' H'); + if FLayers[LayerCnt].HasOpenCL and FLayers[LayerCnt].ShouldOpenCL then write(' OpenCl'); + {$ENDIF} if Assigned(FLayers[LayerCnt].PrevLayer) then begin @@ -11911,7 +13571,7 @@ procedure TNNet.MulAddWeights(Value: TNeuralFloat; Origin: TNNet); function TNNet.SaveDataToString(): string; var LayerCnt: integer; - S: TStringList; + S: TNNetStringList; begin S := CreateTokenizedStringList('!'); if FLayers.Count > 0 then @@ -11921,7 +13581,7 @@ function TNNet.SaveDataToString(): string; S.Add( FLayers[LayerCnt].SaveDataToString() ); end; end; - Result := S.DelimitedText; + Result := S.GetDelimitedTextFast(); S.Free; end; @@ -12043,29 +13703,14 @@ procedure TNNet.LoadFromFile(filename: string); end; function TNNet.Clone(): TNNet; -//var NNData: String; -//begin -// NNData := SaveToString(); -// -// Result := TNNet.Create; -// Result.LoadFromString(NNData); -//end; - -var i : integer; - layStruct : string; +var + NNData: String; begin - // it's not optimal but it covers the basis - layStruct := SaveStructureToString(); - - Result := TNNet.Create; - Result.LoadStructureFromString(layStruct); - - for i := 0 to FLayers.Count - 1 do - begin - // copy weights... 
basically reproduces LoadDataFromString but without all the overhead - Result.fLayers[i].Assign( FLayers[i] ); - Result.FLayers[i].AfterWeightUpdate; - end; + NNData := SaveStructureToString(); + Result := TNNet.Create; + Result.LoadStructureFromString(NNData); + Result.CopyWeights(Self); + NNData := ''; end; procedure TNNet.LoadDataFromString(strData: string); @@ -12133,7 +13778,7 @@ procedure TNNet.LoadDataFromFile(filename: string); begin FErrorProc ( - 'TNNet.LoadFromString - wrong number of arguments: ' + IntToStr(S.Count) + 'TNNet.LoadDataFromFile - wrong number of arguments: ' + IntToStr(S.Count) ); end; @@ -12596,6 +14241,27 @@ function TNNetLayer.InitSELU(Value: TNeuralFloat): TNNetLayer; Result := Self; end; +function TNNetLayer.InitAdam(Beta1, Beta2, Epsilon: TNeuralFloat): TNNetLayer; +var + Cnt: integer; +begin + FBeta1 := Beta1; + FBeta2 := Beta2; + FEpsilon := Epsilon; + FBeta1Decay := 1; + FBeta2Decay := 1; + + if (FNeurons.Count > 0) then + begin + for Cnt := 0 to FNeurons.Count-1 do + begin + FNeurons[Cnt].InitAdam(Self); + end; + AfterWeightUpdate(); + end; + Result := Self; +end; + procedure TNNetLayer.InitDefault(); begin InitGlorotBengioUniform(); @@ -12727,13 +14393,15 @@ function TNNetLayer.GetMaxAbsoluteDelta(): TNeuralFloat; var Cnt: integer; MaxValue: TNeuralFloat; + MaxNeurons: integer; begin - if FNeurons.Count > 0 then + MaxNeurons := FNeurons.Count - 1; + if MaxNeurons >= 0 then begin Result := FNeurons[0].Delta.GetMaxAbs(); - if FNeurons.Count > 1 then + if MaxNeurons > 0 then begin - for Cnt := 0 to FNeurons.Count-1 do + for Cnt := 0 to MaxNeurons do begin MaxValue := FNeurons[Cnt].Delta.GetMaxAbs(); if MaxValue > Result then Result := MaxValue; @@ -12746,6 +14414,26 @@ function TNNetLayer.GetMaxAbsoluteDelta(): TNeuralFloat; end; end; +procedure TNNetLayer.NormalizeMaxAbsoluteDeltaPerNeuron(MaxDelta: TNeuralFloat); +var + Cnt: integer; + MaxValue: TNeuralFloat; + MaxNeurons: integer; +begin + MaxNeurons := FNeurons.Count - 1; + if 
MaxNeurons >= 0 then + begin + for Cnt := 0 to MaxNeurons do + begin + MaxValue := FNeurons[Cnt].Delta.GetMaxAbs(); + if (MaxDelta <> MaxValue) and (MaxValue>0) then + begin + FNeurons[Cnt].Delta.Mul(MaxDelta/MaxValue); + end; + end; + end +end; + function TNNetLayer.GetMinDelta(): TNeuralFloat; var Cnt: integer; @@ -13168,15 +14856,32 @@ procedure TNNetLayer.NormalizeWeights(VMax: TNeuralFloat); end; function TNNetLayer.SaveDataToString(): string; +//var +// S: TStringList; +// Cnt: integer; +//begin +// S := TStringList.Create; +// S.Sorted := false; +// S.Delimiter := '['; +// S.StrictDelimiter := true; + +// if FNeurons.Count > 0 then +// begin +// for Cnt := 0 to FNeurons.Count-1 do +// begin +// S.Add(FNeurons[Cnt].SaveToString()); +// end; +// end; + +// Result := S.DelimitedText; +// S.Free; +//end; var - S: TStringList; + S: TNNetStringList; Cnt: integer; begin - S := TStringList.Create; - S.Sorted := false; - S.Delimiter := '['; - S.StrictDelimiter := true; - + S := CreateTokenizedStringList('['); + S.SetCapacity(FNeurons.Count); if FNeurons.Count > 0 then begin for Cnt := 0 to FNeurons.Count-1 do @@ -13185,7 +14890,7 @@ function TNNetLayer.SaveDataToString(): string; end; end; - Result := S.DelimitedText; + Result := S.GetDelimitedTextFast(); S.Free; end; @@ -13222,14 +14927,24 @@ procedure TNNetLayer.LoadDataFromString(strData: string); function TNNetLayer.SaveStructureToString(): string; var I: integer; + fmt : TFormatSettings; begin Result := ClassName + ':'; + fmt := GetDefaultNumericFormat; for I := Low(FStruct) to High(FStruct) do begin if I > 0 then Result := Result + ';'; Result := Result + IntToStr(FStruct[I]); end; + + Result := Result + '::'; + + for I := Low(FFloatSt) to High(FFloatSt) do + begin + if I > 0 then Result := Result + ';'; + Result := Result + FloatToStr(FFloatSt[I], fmt); + end; end; procedure TNNetLayer.SetBatchUpdate(pBatchUpdate: boolean); @@ -13238,6 +14953,49 @@ procedure TNNetLayer.SetBatchUpdate(pBatchUpdate: boolean); 
end; procedure TNNetLayer.UpdateWeights(); +var + Cnt, MaxNeurons: integer; +begin + MaxNeurons := FNeurons.Count - 1; + if MaxNeurons >= 0 then + begin + if FInertia > 0 then + begin + for Cnt := 0 to MaxNeurons do + begin + FNeurons[Cnt].UpdateWeights(FInertia); + end; + end + else + begin + for Cnt := 0 to MaxNeurons do + begin + FNeurons[Cnt].UpdateWeightsWithoutInertia(); + end; + end; + end; + AfterWeightUpdate(); +end; + +procedure TNNetLayer.CalcAdamDelta(); +var + Cnt, MaxNeurons: integer; +begin + MaxNeurons := FNeurons.Count - 1; + if MaxNeurons >= 0 then + begin + FBeta1Decay := FBeta1Decay * FBeta1; + FBeta2Decay := FBeta2Decay * FBeta2; + FOneMinusBeta1Decay := (1 - FBeta1Decay); + FOneMinusBeta2Decay := (1 - FBeta2Decay); + for Cnt := 0 to MaxNeurons do + begin + FNeurons[Cnt].CalcAdamDelta(); + end; + end; +end; + +procedure TNNetLayer.UpdateWeightsAdam(); var Cnt, MaxNeurons: integer; begin @@ -13246,7 +15004,7 @@ procedure TNNetLayer.UpdateWeights(); begin for Cnt := 0 to MaxNeurons do begin - FNeurons[Cnt].UpdateWeights(FInertia); + FNeurons[Cnt].UpdateWeightsAdam(); end; end; AfterWeightUpdate(); @@ -13369,13 +15127,17 @@ constructor TNNetNeuron.Create(); FBiasDelta := 0; FWeights := TNNetVolume.Create(1,1,1); FBackInertia := TNNetVolume.Create(1,1,1); + FBackInertia2 := TNNetVolume.Create(1,1,1); FDelta := TNNetVolume.Create(1,1,1); + FDelta2 := TNNetVolume.Create(1,1,1); end; destructor TNNetNeuron.Destroy(); begin FDelta.Free; + FDelta2.Free; FBackInertia.Free; + FBackInertia2.Free; FWeights.Free; inherited Destroy(); end; @@ -13385,6 +15147,7 @@ procedure TNNetNeuron.InitUniform(Value: TNeuralFloat = 1); FWeights.InitUniform(Value); FBiasWeight := 0; FBackInertia.Fill(0); + FBackInertia2.Fill(0); FDelta.Fill(0); FBiasInertia := 0; FBiasDelta := 0; @@ -13395,6 +15158,7 @@ procedure TNNetNeuron.InitGaussian(Value: TNeuralFloat); FWeights.InitGaussian(Value); FBiasWeight := 0; FBackInertia.Fill(0); + FBackInertia2.Fill(0); FDelta.Fill(0); 
FBiasInertia := 0; FBiasDelta := 0; @@ -13456,6 +15220,16 @@ procedure TNNetNeuron.InitSELU(Value: TNeuralFloat); InitGaussian( Value * Sqrt(1/FWeights.Size) ); end; +procedure TNNetNeuron.InitAdam(ParentLayer: TNNetLayer); +begin + FBackInertia2.Resize(FBackInertia); + FDelta2.Resize(FDelta); + FBackInertia2.Fill(0); + FDelta2.Fill(0); + FBiasInertia2 := 0; + FParentLayer := ParentLayer; +end; + procedure TNNetNeuron.Fill(Value: TNeuralFloat); begin FWeights.Fill(Value) ; @@ -13474,13 +15248,79 @@ procedure TNNetNeuron.AddInertia(); // (BackInertia*Inertia) + (Delta*(1-Inertia)) procedure TNNetNeuron.UpdateWeights(Inertia:TNeuralFloat); begin - FBiasDelta := FBiasDelta * ( 1 - Inertia ); - FBiasInertia := FBiasInertia * Inertia; - FBiasInertia := FBiasInertia + FBiasDelta; - FBiasWeight := FBiasWeight + FBiasInertia; + if (Inertia>0) then + begin + FBiasDelta := FBiasDelta * ( 1 - Inertia ); + FBiasInertia := FBiasInertia * Inertia; + FBiasInertia := FBiasInertia + FBiasDelta; + FBiasWeight := FBiasWeight + FBiasInertia; - FBackInertia.MulMulAdd(Inertia, 1-Inertia, FDelta); - FWeights.Add(FBackInertia); + FBackInertia.MulMulAdd(Inertia, 1-Inertia, FDelta); + FWeights.Add(FBackInertia); + end + else + begin + FWeights.Add(FDelta); + FBiasWeight := FBiasWeight + FBiasDelta; + end; + ClearDelta(); +end; + +procedure TNNetNeuron.UpdateWeightsWithoutInertia(); +begin + FWeights.Add(FDelta); + FBiasWeight := FBiasWeight + FBiasDelta; + ClearDelta(); +end; + +// https://github.com/theroyakash/Adam +// https://github.com/theroyakash/Adam/blob/master/src/Screen%20Shot%202020-02-05%20at%2010.23.14%20PM.png +procedure TNNetNeuron.CalcAdamDelta(); +begin + // Weights Update + FDelta2.Copy(FDelta); + FDelta2.Mul(FDelta2); + + FBackInertia.MulMulAdd(FParentLayer.FBeta1, 1-FParentLayer.FBeta1, FDelta); + FBackInertia2.MulMulAdd(FParentLayer.FBeta2, 1-FParentLayer.FBeta2, FDelta2); + (* + if random(100)=00 then + WriteLn( + 'D1:', FDelta.GetMaxAbs():8:4, + ' D2:', 
FDelta2.GetMaxAbs():8:4, + ' I1:', FBackInertia.GetMaxAbs():8:4, + ' I2:', FBackInertia2.GetMaxAbs():8:4 + ); + *) + FDelta2.Copy(FBackInertia2); + FDelta2.Divi(FParentLayer.FOneMinusBeta2Decay); + FDelta2.VSqrt(); + FDelta2.Add(FParentLayer.FEpsilon); + + FDelta.Fill(FParentLayer.FLearningRate/FParentLayer.FOneMinusBeta1Decay); + FDelta.Mul(FBackInertia); + FDelta.Divi(FDelta2); + (* + if random(100)=00 then + WriteLn( + 'CALC D1:', FDelta.GetMaxAbs():8:4, + ' CALC D2:', FDelta2.GetMaxAbs():8:4 + ); + *) + + // Bias Update + FBiasInertia := FParentLayer.FBeta1 * FBiasInertia + (1 - FParentLayer.FBeta1) * FBiasDelta; + FBiasInertia2 := FParentLayer.FBeta2 * FBiasInertia2 + (1 - FParentLayer.FBeta2) * (FBiasDelta*FBiasDelta); + + FBiasDelta := + FParentLayer.FLearningRate*( (FBiasInertia/FParentLayer.FOneMinusBeta1Decay) / (sqrt(FBiasInertia2/FParentLayer.FOneMinusBeta2Decay)+FParentLayer.FEpsilon) ) ; +end; + +procedure TNNetNeuron.UpdateWeightsAdam(); +begin + // CalcAdamDelta() must be called before UpdateWeightsAdam; + FWeights.Add(FDelta); + FBiasWeight := FBiasWeight + FBiasDelta; ClearDelta(); end; diff --git a/neural/neuralthread.pas b/neural/neuralthread.pas index cf6f5c0c..a5a857f8 100644 --- a/neural/neuralthread.pas +++ b/neural/neuralthread.pas @@ -126,6 +126,7 @@ TNeuralThreadList = class (TObjectList) procedure NeuralInitCriticalSection(var pCritSec: TRTLCriticalSection); procedure NeuralDoneCriticalSection(var pCritSec: TRTLCriticalSection); function GetProcessId(): {$IFDEF FPC}integer{$ELSE}integer{$ENDIF}; + procedure DebugThreadCount(); implementation @@ -226,6 +227,7 @@ function GetProcessId(): integer; begin GetProcessId := {$IFDEF WINDOWS}GetCurrentProcessId(){$ELSE}fpgetppid(){$ENDIF}; end; + {$ELSE} function GetProcessId(): integer; begin @@ -238,6 +240,11 @@ function GetProcessId(): integer; end; {$ENDIF} +procedure DebugThreadCount; +begin + WriteLn('CPU threads reported by the operating system: ', NeuralDefaultThreadCount,'.'); +end; + 
function fNTL: TNeuralThreadList; begin Result := vNTL; diff --git a/neural/neuralvolume.pas b/neural/neuralvolume.pas index f7ce2819..f8f16b32 100644 --- a/neural/neuralvolume.pas +++ b/neural/neuralvolume.pas @@ -111,7 +111,6 @@ TVolume = class(TObject) FSizeY: integer; FDepth: integer; FTag: array[0..1] of integer; - FFormatSettings: TFormatSettings; FLastPos: integer; function GetTag: integer; {$IFDEF Release} inline; {$ENDIF} procedure SetTag(I: integer); {$IFDEF Release} inline; {$ENDIF} @@ -150,6 +149,9 @@ TVolume = class(TObject) procedure AddAtDepth(pDepth: integer; Value: T); overload; {$IFDEF Release} inline; {$ENDIF} procedure AddAtDepth(pDepth: integer; Original: TVolume); overload; {$IFDEF Release} inline; {$ENDIF} procedure AddFromDepthToDepth(Original: TVolume; FromDepth, ToDepth: integer); {$IFDEF Release} inline; {$ENDIF} + procedure AddTransposingXD(Original: TVolume); {$IFDEF Release} inline; {$ENDIF} + procedure AddTransposingYD(Original: TVolume); {$IFDEF Release} inline; {$ENDIF} + procedure AddTransposingAs2D(Original: TVolume); {$IFDEF Release} inline; {$ENDIF} procedure CopyFromDepthToDepth(Original: TVolume; FromDepth, ToDepth: integer); {$IFDEF Release} inline; {$ENDIF} procedure AddLayers(A,B: TVolume); overload; {$IFDEF Release} inline; {$ENDIF} procedure Sub(x, y, d: integer; Value: T); overload; {$IFDEF Release} inline; {$ENDIF} @@ -196,14 +198,24 @@ TVolume = class(TObject) procedure Copy(var Original: array of T); overload; procedure Copy(var Original: array of byte); overload; procedure Copy(Original: TBits; pFlase: T = -0.5; pTrue: T = +0.5); overload; - procedure CopyPadding(Original: TVolume; Padding: integer); {$IFDEF Release} inline; {$ENDIF} + procedure CopyPadding(Original: TVolume; Padding: integer); {$IFDEF Release} inline; {$ENDIF} overload; + procedure CopyPadding(Original: TVolume; PaddingX, PaddingY: integer); {$IFDEF Release} inline; {$ENDIF} overload; procedure CopyCropping(Original: TVolume; StartX, StartY, pSizeX, 
pSizeY: integer); procedure CopyResizing(Original: TVolume; NewSizeX, NewSizeY: integer); - procedure CopyNoChecks(Original: TVolume); {$IFDEF Release} inline; {$ENDIF} + procedure CopyNoChecks(Original: TVolume); {$IFDEF Release} inline; {$ENDIF} overload; + procedure CopyNoChecks(var Original: array of byte); overload; + procedure CopyNoChecksIntArr(var Original: array of integer); overload; + procedure CopyReversedNoChecksIntArr(var Original: array of integer); overload; + procedure CopyNoChecks(var Original: string); overload; + procedure CopyReversedNoChecks(var Original: string); overload; procedure CopyChannels(Original: TVolume; aChannels: array of integer); + // Transpose Copying + procedure CopyTransposingXD(Original: TVolume); + procedure CopyTransposingYD(Original: TVolume); + procedure CopyTransposingAs2D(Original: TVolume); procedure Define(Original: array of T); function DotProduct(Original: TVolume): T; overload; {$IFDEF Release} inline; {$ENDIF} - class function DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: integer): Single; overload; + class function DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: integer): Single; overload; {$IFDEF Release} inline; {$ENDIF} class function Product(PtrA: TNeuralFloatArrPtr; NumElements: integer): Single; overload; {$IFDEF Release} inline; {$ENDIF} function SumDiff(Original: TVolume): T; {$IFDEF Release} inline; {$ENDIF} procedure DebugDiff(Original: TVolume; Limit: Single = 0); @@ -232,6 +244,8 @@ TVolume = class(TObject) function GetSmallestIdxInRange(StartPos, Len: integer): integer; function GetStdDeviation(): T; {$IFDEF Release} inline; {$ENDIF} function GetMagnitude(): T; {$IFDEF Release} inline; {$ENDIF} + function GetEntropy(): T; + function GetPerplexity(): T; procedure FlipX(); procedure FlipY(); procedure IncTag(); {$IFDEF Release} inline; {$ENDIF} @@ -272,17 +286,31 @@ TVolume = class(TObject) procedure LoadFromString(strData: string); // bit operations - procedure CopyAsBits(var 
Original: array of byte; pFlase: T = -0.5; pTrue: T = +0.5); overload; + procedure CopyAsBits(var Original: array of byte; pFalse: T = -0.5; pTrue: T = +0.5; CanResize: boolean = True); overload; + procedure CopyAsBits(Original: string; pFalse: T = -0.5; pTrue: T = +0.5; CanResize: boolean = True); overload; + procedure CopyAsBitsReversed(Original: string; pFalse: T = -0.5; pTrue: T = +0.5); procedure ReadAsBits(var Dest: array of byte; Threshold: T = 0.0); - // Classification Functions + // Classification Functions (SetClass is similar to One Hot Encoding) procedure SetClass(pClass: integer; value: T); {$IFNDEF FPC} overload; {$ENDIF} procedure SetClass(pClass: integer; TrueValue, FalseValue: T); {$IFNDEF FPC} overload; {$ENDIF} procedure SetClassForHiperbolicTangent(pClass: integer); procedure SetClassForReLU(pClass: integer); procedure SetClassForSoftMax(pClass: integer); + // GetClass is similar to argmax function GetClass(): integer; + function GetClassOnPixel(X, Y: integer): integer; function SoftMax(): T; + procedure PointwiseSoftMax(); + + // Encoding Functions + procedure OneHotEncoding(aTokens: array of integer); overload; + procedure OneHotEncoding(aTokens: string); overload; + procedure OneHotEncodingReversed(aTokens: string); overload; + procedure OneHotEncodingReversed(var aTokens: array of integer); overload; + // Sets positional embedding as per paper "Attention Is All You Need". + // https://arxiv.org/abs/1706.03762 . 
+ procedure PositionalEncoding(n: integer = 10000); // Color Encoding Functions procedure RgbToHsv(); {$IFDEF Release} inline; {$ENDIF} @@ -310,6 +338,13 @@ TVolume = class(TObject) property Depth: integer read FDepth; end; + TNNetToken = record + Token: Integer; + Score: TNeuralFloat; + end; + + TNNetTokenArray = array of TNNetToken; + { TNNetVolume } {$IFDEF FPC} TNNetVolume = class (specialize TVolume) @@ -323,9 +358,53 @@ TNNetVolume = class (TVolume) function GetMemSize(): integer; {$IFDEF Release} inline; {$ENDIF} procedure CalculateLocalResponseFrom2D(Original: TNNetVolume; pSize:integer; alpha, beta: TNeuralFloat ); procedure CalculateLocalResponseFromDepth(Original: TNNetVolume; pSize:integer; alpha, beta: TNeuralFloat ); + procedure GetTokenArray(var TokenArray: TNNetTokenArray); + (* + Assume that "As" and "Bs" contain lists of vectors "A" and "B". + "NumAs and NumBs" are the number of elements in the + The DotProducts function runs dot products for all combinations of "As" and "Bs". + "Convolutions" are "dot products". + Assume 3 matrixes 2x2 of the type TNNetVolume: A, B and B transposed (BT) + Assume c,d,e,f,x,y,z,w are of the type TNeuralFloat. + + These are the matrixes A, B and BT (B Transposed): + A B BT + c d x y x z + e f z w y w + + A = [c, d, e, f] + B = [x, y, z, w] + + a1 = [c, d] + a2 = [e, f] + + b1 = [x, y] + b2 = [z, w] + + bt1 = [x, z] + bt2 = [y, w] + + A = [a1 , a2] + B = [b1 , b2] + BT = [bt1, bt2] + + * denotes "dot product". + The result of DotProducts (2, 2, 2, A, B) will be: [a1* b1, a2* b1, a1* b2, a2* b2] + The result of a matrix multiplicaton would be: [a1*bt1, a1*bt2, a2*bt1, a2*bt2] + The result of DotProducts (2, 2, 2, A, BT)will be: [a1*bt1, a2*bt1, a1*bt2, a2*bt2] + The transposed result of DotProducts (2, 2, 4, A, BT) will be the same as a matrix multiplication AB. + OR + Given that (A B)T = (BT AT), + The result of DotProducts (2, 2, 2, BT, A) is the same as a matrix multiplication AB. 
+ This interpretation is valid for the functions: + * InterleavedDotProduct + * DotProducts + * DotProductsTiled + *) procedure InterleavedDotProduct(InterleavedAs, B:TNNetVolume); overload; procedure InterleavedDotProduct(InterleavedAs, Bs:TNNetVolume; VectorSize: integer); overload; procedure DotProducts(NumAs, NumBs, VectorSize: integer; VAs, VBs: TNNetVolume); + procedure DotProductsPointwise(VAs, VBs: TNNetVolume); procedure DotProductsTiled(NumAs, NumBs, VectorSize: integer; VAs, VBs: TNNetVolume; TileSizeA, TileSizeB: integer); procedure GroupedDotProductsTiled(Groups, NumAs, NumBs, VectorSize: integer; VAs, VBs: TNNetVolume; TileSizeA, TileSizeB: integer); procedure AddArea(DestX, DestY, OriginX, OriginY, LenX, LenY: integer; Original: TNNetVolume); @@ -333,9 +412,16 @@ TNNetVolume = class (TVolume) function HasAVX2: boolean; function HasAVX512: boolean; function PearsonCorrelation(Y : TNNetVolume): TNeuralFloat; + // AddSumChannel adds the sum of each channel to the current 1D array. procedure AddSumChannel(Original: TNNetVolume); {$IFDEF Release} inline; {$ENDIF} + // AddSumSqrChannel is designed to compute the sum of the squares of elements + // channel-wise from Original and add this sum to the current volume. procedure AddSumSqrChannel(Original: TNNetVolume); {$IFDEF Release} inline; {$ENDIF} + // AddToChannels receives an 1D array (Original). Each element in Original + // will be summed to the entire XY 2D slice at the same depth. procedure AddToChannels(Original: TNNetVolume); {$IFDEF Release} inline; {$ENDIF} + // MulChannels receives an 1D array (Original). Each element in Original + // will multiply the entire XY 2D slice at the same depth. 
procedure MulChannels(Original: TNNetVolume); {$IFDEF Release} inline; {$ENDIF} procedure Mul(Original: TNNetVolume); overload; {$IFDEF Release} inline; {$ENDIF} procedure NormalizeMax(Value: TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} @@ -364,7 +450,8 @@ TNNetVolume = class (TVolume) procedure Divi(Value: Single); overload; {$IFDEF Release} inline; {$ENDIF} procedure Copy(Original: TNNetVolume); overload; {$IFDEF Release} inline; {$ENDIF} procedure CopyRelu(Original: TNNetVolume); overload; {$IFDEF Release} inline; {$ENDIF} - procedure CopyPadding(Original: TNNetVolume; Padding: integer); + procedure CopyPadding(Original: TNNetVolume; Padding: integer); overload; + procedure CopyPadding(Original: TNNetVolume; PaddingX, PaddingY: integer); {$IFDEF Release} inline; {$ENDIF} overload; procedure CopyNoChecks(Original: TNNetVolume); function GetSum(): TNeuralFloat; override; function GetSumSqr(): TNeuralFloat; override; @@ -376,6 +463,42 @@ TNNetVolume = class (TVolume) DataPtr: TNeuralFloatArrPtr read FDataPtr; end; + { TNNetSamplerBase } + + TNNetSamplerBase = class(TObject) + protected + FTokenArr: TNNetTokenArray; + public + function GetToken(Origin: TNNetVolume): integer; virtual; abstract; + procedure SortTokenArray(); + destructor Destroy(); override; + end; + + { TNNetSamplerGreedy } + TNNetSamplerGreedy = class (TNNetSamplerBase) + public + function GetToken(Origin: TNNetVolume): integer; override; + end; + + { TNNetSamplerTopK } + TNNetSamplerTopK = class (TNNetSamplerBase) + protected + FTopK: integer; + public + constructor Create(TopK: integer); + function GetToken(Origin: TNNetVolume): integer; override; + end; + + { TNNetSamplerTopP } + TNNetSamplerTopP = class (TNNetSamplerBase) + protected + FTopP: TNeuralFloat; + public + constructor Create(TopP: TNeuralFloat); + function GetToken(Origin: TNNetVolume): integer; override; + end; + + /// Implements a pair of volumes TNNetVolumePair = class(TObject) protected @@ -425,6 +548,7 @@ TNNetVolumeList = 
class (TNNetList) function GetSum(): TNeuralFloat; function GetAvg(): TNeuralFloat; procedure AddValue(Value: TNeuralFloat); + procedure Mul(Value: TNeuralFloat); procedure Divi(Value: TNeuralFloat); function GetClosestId(Original: TNNetVolume; var MinDist: TNeuralFloat): integer; function GetManhattanClosestId(Original: TNNetVolume; var MinDist: TNeuralFloat): integer; @@ -498,18 +622,34 @@ TNNetStringList = class(TStringList) procedure KeepLast(Cnt: integer); procedure DeleteFirst(Cnt: integer); procedure DeleteLast(Cnt: integer); + procedure SetCapacity(NewCapacity: Integer); override; + function GetDelimitedTextFast: string; + procedure LoadLargeFile(Filename: string); end; { TStringListInt } TStringListInt = class(TNNetStringList) private + FTokenizer: TStringList; + FIntegerToStr: array of string; + function GetInteger(Index: Integer): PtrInt; {$IFDEF Release} inline; {$ENDIF} procedure PutInteger(Index: Integer; AValue: PtrInt); {$IFDEF Release} inline; {$ENDIF} public constructor Create; + destructor Destroy; override; + procedure SortByIntegerAsc; procedure SortByIntegerDesc; function AddInteger(const S: string; AValue: PtrInt): integer; {$IFDEF Release} inline; {$ENDIF} + function WordToIndex(pWord:string): integer; + function WordToInteger(pWord:string): integer; + function IntegerToWord(pInteger: integer): string; + procedure SaveCurrentPositionAndSort(); + procedure StringToIndexArray(pString: string; var IntArr: TNeuralIntegerArray); + procedure StringToIntegerArray(pString: string; var IntArr: TNeuralIntegerArray); + function IndexArrayToString(var IntArr: TNeuralIntegerArray): string; + function IntegerArrayToString(var IntArr: TNeuralIntegerArray): string; property Integers[Index: Integer]: PtrInt read GetInteger write PutInteger; end; @@ -601,20 +741,18 @@ TStringStringListVolume = class(TStringsObj) {$ENDIF} { TNNetDictionary } + // This class creates a dictionary where integers contains the frequency. 
TNNetDictionary = class(TStringListInt) protected - FTokenizer: TStringList; FMaxSize: integer; public constructor Create(pMaxSize: integer); - destructor Destroy; override; function AddWordToDictionary(pWord:string): boolean; function AddWordsToDictionary(pString:string): boolean; procedure AddWordFromCsvField(filename: string; fieldId: integer; SkipFirstLine: boolean = True; Separator:char = ','); procedure RemoveAllStringsWithLessThen(I:integer); - function WordToIndex(pWord:string): integer; procedure StringToVolume(pString: string; Volume: TNNetVolume); function VolumeToString(Volume: TNNetVolume; Threshold: TNeuralFloat = 0.2): string; procedure CsvToTStringVolumeList(filename: string; @@ -625,8 +763,8 @@ TNNetDictionary = class(TStringListInt) procedure LoadDictionaryFromFile(Filename: string; Separator:char = ','); end; - function CreateTokenizedStringList(str: string; c:char):TStringList; overload; - function CreateTokenizedStringList(c:char):TStringList; overload; + function CreateTokenizedStringList(str: string; c:char):TNNetStringList; overload; + function CreateTokenizedStringList(c:char):TNNetStringList; overload; function HiperbolicTangent(x: TNeuralFloat): TNeuralFloat; function HiperbolicTangentDerivative(x: TNeuralFloat): TNeuralFloat; @@ -687,9 +825,13 @@ TNNetDictionary = class(TStringListInt) function NeuralFloatToStr(V: TNeuralFloat): string; function NeuralStrToFloat(V: String): TNeuralFloat; + function GetLastChars(const InputStr: string; LenStr: Integer): string; + procedure TestTNNetVolume(); procedure TestKMeans(); + function GetDefaultNumericFormat: TFormatSettings; + implementation {$IFDEF CPUX64} @@ -699,18 +841,25 @@ implementation {$DEFINE x64} {$ENDIF} -uses {$IFNDEF x64} Neural.AVX {$ELSE} Neural.AVXx64{$ENDIF}, neuralbit, +uses {$IFNDEF x64} Neural.AVX {$ELSE} Neural.AVXx64{$ENDIF}, neuralbit, Math, strutils, CPUFeatures; -function CreateTokenizedStringList(str: string; c:char):TStringList; +var locDataFmtSet : TFormatSettings; + 
+function GetDefaultNumericFormat: TFormatSettings; +begin + Result := locDataFmtSet; +end; + +function CreateTokenizedStringList(str: string; c:char):TNNetStringList; begin Result := CreateTokenizedStringList(c); Result.DelimitedText := str; end; -function CreateTokenizedStringList(c: char): TStringList; +function CreateTokenizedStringList(c: char): TNNetStringList; begin - Result := TStringList.Create; + Result := TNNetStringList.Create; Result.Sorted := false; Result.Delimiter := c; Result.StrictDelimiter := true; @@ -1319,6 +1468,23 @@ procedure WriteLnPassIfZero(x: TNeuralFloat; Tolerance: TNeuralFloat=0.0001); else WriteLn(' FAILED.'); end; +// https://machinelearningmastery.com/a-gentle-introduction-to-positional-encoding-in-transformer-models-part-1/ +// Expected result is: +// [[ 0. 1. 0. 1. ] +// [ 0.84147098 0.54030231 0.09983342 0.99500417] +// [ 0.90929743 -0.41614684 0.19866933 0.98006658] +// [ 0.14112001 -0.9899925 0.29552021 0.95533649]] +procedure TestTNNetVolumePositionalEncoding; +var + X: TNNetVolume; +begin + X := TNNetVolume.Create(4,1,4); + X.PositionalEncoding(100); + X.Print(); + X.Free; + readln; +end; + procedure TestTNNetVolume(); var TestSize: integer; @@ -1736,9 +1902,103 @@ function HardSwishDerivative(x: TNeuralFloat): TNeuralFloat; end; end; -{$IFDEF FPC} +procedure QuickSortTokenArray(var A: TNNetTokenArray; iLo, iHi: Integer); +var + Lo, Hi: Integer; + Mid, T: TNNetToken; +begin + Lo := iLo; + Hi := iHi; + Mid := A[(Lo + Hi) div 2]; + repeat + while A[Lo].Score > Mid.Score do Inc(Lo); + while A[Hi].Score < Mid.Score do Dec(Hi); + if Lo <= Hi then + begin + T := A[Lo]; + A[Lo] := A[Hi]; + A[Hi] := T; + Inc(Lo); + Dec(Hi); + end; + until Lo > Hi; + if Hi > iLo then QuickSortTokenArray(A, iLo, Hi); + if Lo < iHi then QuickSortTokenArray(A, Lo, iHi); +end; + +{ TNNetSamplerTopP } + +constructor TNNetSamplerTopP.Create(TopP: TNeuralFloat); +begin + inherited Create(); + FTopP := TopP; +end; + +function 
TNNetSamplerTopP.GetToken(Origin: TNNetVolume): integer; +var + CumulativeSum: TNeuralFloat; + I, Threshold: Integer; +begin + Origin.GetTokenArray(FTokenArr); + SortTokenArray(); + CumulativeSum := 0; + Threshold := 0; + for I := Low(FTokenArr) to High(FTokenArr) do + begin + CumulativeSum := CumulativeSum + FTokenArr[i].Score; + if CumulativeSum > FTopP then + begin + Threshold := I; + Break; + end; + end; + + // Randomly select one of the top tokens within the threshold. + if Threshold > 0 then + Result := FTokenArr[Random(Threshold)].Token + else + Result := FTokenArr[0].Token; // Fallback in case P is too low. +end; + +{ TNNetSamplerTopK } + +constructor TNNetSamplerTopK.Create(TopK: integer); +begin + inherited Create(); + FTopK := TopK; +end; + +function TNNetSamplerTopK.GetToken(Origin: TNNetVolume): integer; +begin + Origin.GetTokenArray(FTokenArr); + SortTokenArray(); + Result := FTokenArr[Random(FTopK)].Token; +end; + +{ TNNetSamplerBase } + +procedure TNNetSamplerBase.SortTokenArray; +begin + QuickSortTokenArray(FTokenArr, Low(FTokenArr), High(FTokenArr)); +end; + +destructor TNNetSamplerBase.Destroy; +begin + SetLength(FTokenArr, 0); + inherited Destroy; +end; + +{ TNNetSamplerGreedy } + +function TNNetSamplerGreedy.GetToken(Origin: TNNetVolume): integer; +begin + Result := Origin.GetClass(); +end; + { TStringStringList } +{$IFDEF FPC} + procedure TStringStringList.LoadFromCsv(filename: string; SkipFirstLine:boolean = true; KeyId: integer = -1; @@ -1904,6 +2164,88 @@ procedure TNNetStringList.DeleteLast(Cnt: integer); end; end; +procedure TNNetStringList.SetCapacity(NewCapacity: Integer); +begin + inherited SetCapacity(NewCapacity); +end; + +/// Helper function to check if a string contains any character from a set +// This function was coded by chatGPT4. 
+function StrHasChars(const Str: string; Strict: Boolean; const Chars: TSysCharSet): Boolean; +var + P: PChar; +begin + P := PChar(Str); + while (P^ <> #0) and (not CharInSet(P^, Chars) or Strict) do Inc(P); + Result := P^ <> #0; +end; + +// This function was coded by chatGPT4. +function TNNetStringList.GetDelimitedTextFast: string; +{$IFNDEF FPC} +begin + Result := DelimitedText; +end; +{$ELSE} +var + I: Integer; + S: String; + BreakChars: set of Char; + DoQuote: Boolean; + StringBuilder: TAnsiStringBuilder; +begin + CheckSpecialChars; + if StrictDelimiter then + BreakChars := [#0, QuoteChar, Delimiter] + else + BreakChars := [#0..' ', QuoteChar, Delimiter]; + + StringBuilder := TAnsiStringBuilder.Create(); + try + for I := 0 to Count - 1 do + begin + S := Strings[I]; + DoQuote := AlwaysQuote; + if not DoQuote then + begin + // Quote strings that include BreakChars + DoQuote := StrHasChars(S, True, BreakChars); + end; + if DoQuote and (QuoteChar <> #0) then + StringBuilder.Append(AnsiQuotedStr(S, QuoteChar)) + else + StringBuilder.Append(S); + + if I < Count - 1 then + StringBuilder.Append(Delimiter); + end; + + // Quote empty string + if (StringBuilder.Length = 0) and (Count = 1) and (QuoteChar <> #0) then + StringBuilder.Append(QuoteChar).Append(QuoteChar); + + Result := StringBuilder.ToString; + finally + StringBuilder.Free; + end; +end; +{$ENDIF} + +procedure TNNetStringList.LoadLargeFile(Filename: string); +var + LargeFile: TextFile; + StrLine: string; +begin + AssignFile(LargeFile, Filename); + Reset(LargeFile); + while not Eof(LargeFile) do + begin + ReadLn(LargeFile, StrLine); + Self.Add(StrLine); + end; + CloseFile(LargeFile); +end; + {$IFDEF FPC} { TStringsObj } function TStringsObj.GetList(Index: Integer): TObj; @@ -2075,6 +2417,15 @@ constructor TStringListInt.Create; begin inherited Create; Self.OwnsObjects := false; + FTokenizer := CreateTokenizedStringList(' '); + SetLength(FIntegerToStr, 0); +end; + +destructor TStringListInt.Destroy; +begin + 
SetLength(FIntegerToStr, 0); + FTokenizer.Free; + inherited Destroy; end; procedure TStringListInt.SortByIntegerAsc; @@ -2102,15 +2453,8 @@ constructor TNNetDictionary.Create(pMaxSize: integer); Self.CaseSensitive := false; FMaxSize := pMaxSize; - - FTokenizer := CreateTokenizedStringList(' '); end; -destructor TNNetDictionary.Destroy; -begin - FTokenizer.Free; - inherited Destroy; -end; function TNNetDictionary.AddWordToDictionary(pWord: string): boolean; var @@ -2214,11 +2558,134 @@ procedure TNNetDictionary.RemoveAllStringsWithLessThen(I: integer); end; end; -function TNNetDictionary.WordToIndex(pWord: string): integer; +function TStringListInt.WordToIndex(pWord: string): integer; begin if not(Self.Find(pWord, Result)) then Result := -1; end; +function TStringListInt.WordToInteger(pWord: string): integer; +var + Position: integer; +begin + if Self.Find(pWord, Position) then + begin + Result := Integers[Position]; + end + else + begin + Result := -1; + end; +end; + +function TStringListInt.IntegerToWord(pInteger: integer): string; +begin + Result := FIntegerToStr[pInteger]; +end; + +procedure TStringListInt.StringToIndexArray(pString: string; + var IntArr: TNeuralIntegerArray); +var + WordCount: integer; + WordIndex: integer; +begin + FTokenizer.DelimitedText := pString; + + if FTokenizer.Count > 0 then + begin + SetLength(IntArr, FTokenizer.Count); + for WordCount := 0 to FTokenizer.Count - 1 do + begin + WordIndex := Self.WordToIndex(FTokenizer[WordCount]); + //WriteLn(WordIndex,':',FTokenizer[WordCount]); + if WordIndex >= 0 then + begin + IntArr[WordCount] := WordIndex; + end; + end; + end; +end; + +procedure TStringListInt.StringToIntegerArray(pString: string; + var IntArr: TNeuralIntegerArray); +var + WordCount: integer; + WordInteger: integer; +begin + FTokenizer.DelimitedText := pString; + + if FTokenizer.Count > 0 then + begin + SetLength(IntArr, FTokenizer.Count); + for WordCount := 0 to FTokenizer.Count - 1 do + begin + WordInteger := 
Self.WordToInteger(FTokenizer[WordCount]); + //WriteLn(WordIndex,':',FTokenizer[WordCount]); + if WordInteger >= 0 then + begin + IntArr[WordCount] := WordInteger; + end; + end; + end; +end; + +function TStringListInt.IndexArrayToString(var IntArr: TNeuralIntegerArray + ): string; +var + WordCount, WordMax: integer; + WordIndex: integer; +begin + Result := ''; + WordMax := Length(IntArr) - 1; + if WordMax >= 0 then + begin + for WordCount := 0 to WordMax do + begin + WordIndex := IntArr[WordCount]; + //WriteLn(WordIndex,':',FTokenizer[WordCount]); + if WordIndex >= 0 then + begin + Result := Result + Self[WordIndex]; + end; + end; + end; +end; + +function TStringListInt.IntegerArrayToString(var IntArr: TNeuralIntegerArray + ): string; +var + WordCount, WordMax: integer; + WordInteger: integer; +begin + Result := ''; + WordMax := Length(IntArr) - 1; + if WordMax >= 0 then + begin + for WordCount := 0 to WordMax do + begin + WordInteger := IntArr[WordCount]; + //WriteLn(WordIndex,':',FTokenizer[WordCount]); + if WordInteger >= 0 then + begin + Result := Result + FIntegerToStr[WordInteger]; + end; + end; + end; +end; + +procedure TStringListInt.SaveCurrentPositionAndSort(); +var + RowCnt: integer; +begin + SetLength(FIntegerToStr, Self.Count); + for RowCnt := 0 to Self.Count - 1 do + begin + Self.Integers[RowCnt] := RowCnt; + FIntegerToStr[RowCnt] := Self[RowCnt]; + end; + Self.Sort(); + Self.Sorted := true; +end; + procedure TNNetDictionary.StringToVolume(pString: string; Volume: TNNetVolume); var WordCount: integer; @@ -2620,6 +3087,19 @@ procedure TNNetVolumeList.AddValue(Value: TNeuralFloat); end; end; +procedure TNNetVolumeList.Mul(Value: TNeuralFloat); +var + I: integer; +begin + if (Count>0) then + begin + for I := 0 to Count - 1 do + begin + Self[I].Mul(Value); + end; + end; +end; + procedure TNNetVolumeList.Divi(Value: TNeuralFloat); var I: integer; @@ -2732,7 +3212,9 @@ procedure TNNetVolumeList.ConcatInto(V: TNNetVolume); TotalSize := Self.GetTotalSize(); 
if V.Size <> TotalSize then begin - V.ReSize(TotalSize,1,1); + if TotalSize = Count * Self[0].Size + then V.ReSize(Count,1,Self[0].Size) + else V.ReSize(TotalSize,1,1); end; CurrPos := 0; @@ -2983,9 +3465,6 @@ constructor TVolume.Create(pSizeX, pSizeY, pDepth: integer; c: T); ReSize(pSizeX, pSizeY, pDepth); Fill(c); ClearTag(); - - {$IFDEF FPC} FFormatSettings := DefaultFormatSettings; {$ENDIF} - FFormatSettings.DecimalSeparator := '.'; end; constructor TVolume.Create(pInput: array of T); @@ -3239,6 +3718,86 @@ procedure TVolume.AddFromDepthToDepth(Original: TVolume; FromDepth, end; end; +procedure TVolume.AddTransposingXD(Original: TVolume); +var + CntX, CntY, CntD: integer; + MaxX, MaxY, MaxD: integer; +begin + ReSize(Original.Depth, Original.SizeY, Original.SizeX); + MaxX := FSizeX - 1; + MaxY := FSizeY - 1; + MaxD := FDepth - 1; + if MaxY > 0 then + begin + for CntX := 0 to MaxX do + begin + for CntY := 0 to MaxY do + begin + for CntD := 0 to MaxD do + begin + Add(CntX, CntY, CntD, Original[CntD, CntY, CntX]); + end; + end; + end; + end + else + begin + for CntX := 0 to MaxX do + begin + for CntD := 0 to MaxD do + begin + Add(CntX, 0, CntD, Original[CntD, 0, CntX]); + end; + end; + end; +end; + +procedure TVolume.AddTransposingYD(Original: TVolume); +var + CntX, CntY, CntD: integer; + MaxX, MaxY, MaxD: integer; +begin + ReSize(Original.SizeX, Original.Depth, Original.SizeY); + MaxX := FSizeX - 1; + MaxY := FSizeY - 1; + MaxD := FDepth - 1; + if MaxX > 0 then + begin + for CntX := 0 to MaxX do + begin + for CntY := 0 to MaxY do + begin + for CntD := 0 to MaxD do + begin + Add(CntX, CntY, CntD, Original[CntX, CntD, CntY]); + end; + end; + end; + end + else + begin + for CntY := 0 to MaxY do + begin + for CntD := 0 to MaxD do + begin + Add(0, CntY, CntD, Original[0, CntD, CntY]); + end; + end; + end; +end; + +procedure TVolume.AddTransposingAs2D(Original: TVolume); +var + OriginalSizeX, OriginalSizeY, OriginalDepth: integer; +begin + OriginalSizeX := 
Original.SizeX; + OriginalSizeY := Original.SizeY; + OriginalDepth := Original.Depth; + Original.ReSize(OriginalSizeX*OriginalSizeY, 1, OriginalDepth); + AddTransposingXD(Original); + Original.ReSize(OriginalSizeX, OriginalSizeY, OriginalDepth); +end; + procedure TVolume.CopyFromDepthToDepth(Original: TVolume; FromDepth, ToDepth: integer); var @@ -3921,21 +4480,18 @@ procedure TVolume.Resize(pSize: integer); end; procedure TVolume.ReSize(pSizeX, pSizeY, pDepth: integer); +var + NewSize: integer; begin - if - (FSizeX <> pSizeX) or - (FSizeY <> pSizeY) or - (FDepth <> pDepth) or - (Length(FData) = 0) then + NewSize := pSizeX * pSizeY * pDepth; + if (NewSize <> FSize) then begin - FSizeX := pSizeX; - FSizeY := pSizeY; - FDepth := pDepth; - - FSize := FSizeX * FSizeY * FDepth; - + FSize := NewSize; SetLength(FData, FSize); end; + FSizeX := pSizeX; + FSizeY := pSizeY; + FDepth := pDepth; end; procedure TVolume.ReSize(Original: TVolume); @@ -4077,6 +4633,81 @@ procedure TVolume.CopyNoChecks(Original: TVolume); Move(Original.FData[0], Self.FData[0], Self.Size * SizeOf(T)); end; +procedure TVolume.CopyNoChecks(var Original: array of byte); +var + I: integer; + vHigh: integer; +begin + if Length(Original) > 0 then + begin + vHigh := High(Original); + for I := 0 to vHigh do + begin + FData[I] := Original[I]; + end; + end; +end; + +procedure TVolume.CopyNoChecksIntArr(var Original: array of integer); +var + I: integer; + vHigh: integer; +begin + if Length(Original) > 0 then + begin + vHigh := High(Original); + for I := 0 to vHigh do + begin + FData[I] := Original[I]; + end; + end; +end; + +procedure TVolume.CopyReversedNoChecksIntArr(var Original: array of integer); +var + I: integer; + MaxLen: integer; +begin + MaxLen := Length(Original) - 1; + if MaxLen >= 0 then + begin + for I := 0 to MaxLen do + begin + FData[I] := Original[MaxLen - I]; + end; + end; +end; + +procedure TVolume.CopyNoChecks(var Original: string); +var + I: integer; + LenOriginal: integer; +begin + 
LenOriginal := Length(Original); + if LenOriginal > 0 then + begin + for I := 1 to LenOriginal do + begin + FData[I-1] := Ord(Original[I]); + end; + end; +end; + +procedure TVolume.CopyReversedNoChecks(var Original: string); +var + I: integer; + LenOriginal: integer; +begin + LenOriginal := Length(Original); + if LenOriginal > 0 then + begin + for I := 1 to LenOriginal do + begin + FData[I-1] := Ord(Original[LenOriginal - I + 1]); + end; + end; +end; + procedure TVolume.CopyChannels(Original: TVolume; aChannels: array of integer); var MaxX, MaxY: integer; @@ -4190,21 +4821,23 @@ procedure TVolume.Copy(Original: TBits; pFlase: T = -0.5; pTrue: T = +0.5); end; end; -procedure TVolume.CopyAsBits(var Original: array of byte; pFlase: T = -0.5; pTrue: T = +0.5); +procedure TVolume.CopyAsBits(var Original: array of byte; pFalse: T = -0.5; pTrue: T = +0.5; CanResize:boolean = True); var I: integer; vHigh: integer; + LenOriginal: integer; aTranslate: array [0..1] of T; begin - if Length(Original) > 0 then + LenOriginal := Length(Original); + if LenOriginal > 0 then begin - if (Length(Original)*8 <> Self.Size) then + if CanResize and (LenOriginal*8 <> Self.Size) then begin - Self.ReSize(Length(Original), 1, 8); + Self.ReSize(LenOriginal, 1, 8); end; - vHigh := Length(Original) * 8 - 1; - aTranslate[0] := pFlase; + vHigh := LenOriginal * 8 - 1; + aTranslate[0] := pFalse; aTranslate[1] := pTrue; for I := 0 to vHigh do @@ -4214,6 +4847,47 @@ procedure TVolume.CopyAsBits(var Original: array of byte; pFlase: T = -0.5; pTru end; end; +procedure TVolume.CopyAsBits(Original: string; pFalse: T; pTrue: T; CanResize:boolean); +var + AB: array of byte; + I: integer; + vHigh: integer; + LenOriginal: integer; +begin + LenOriginal := Length(Original); + if LenOriginal > 0 then + begin + SetLength(AB, LenOriginal); + vHigh := LenOriginal; + for I := 1 to vHigh do + begin + AB[I-1] := Min(Ord(Original[I]), 255); + end; + Self.CopyAsBits(AB, pFalse, pTrue, CanResize); + end; +end; + 
+procedure TVolume.CopyAsBitsReversed(Original: string; pFalse: T; pTrue: T); +var + AB: array of byte; + I: integer; + vHigh: integer; + LenOriginal: integer; +begin + LenOriginal := Length(Original); + if LenOriginal > 0 then + begin + SetLength(AB, LenOriginal); + vHigh := LenOriginal; + for I := 1 to vHigh do + begin + AB[I-1] := Min(Ord(Original[vHigh-I+1]), 255); + end; + Self.CopyAsBits(AB, pFalse, pTrue, False); + SetLength(AB, 0); + end; +end; + (* procedure TVolume.CopyPadding(Original: TVolume; Padding: integer); var @@ -4268,6 +4942,30 @@ procedure TVolume.CopyPadding(Original: TVolume; Padding: integer); end; end; +procedure TVolume.CopyPadding(Original: TVolume; PaddingX, PaddingY: integer); +var + CntY: integer; + NewSizeX, NewSizeY: integer; + MaxY: integer; + RowSize: integer; + SourceRawPos, DestRawPos: integer; +begin + NewSizeX := Original.SizeX + PaddingX * 2; + NewSizeY := Original.SizeY + PaddingY * 2; + MaxY := Original.SizeY - 1; + RowSize := Original.SizeX * Original.Depth * SizeOf(TNeuralFloat); + + Resize(NewSizeX, NewSizeY, Original.Depth); + Fill(0); + + for CntY := 0 to MaxY do + begin + SourceRawPos := Original.GetRawPos(0, CntY, 0); + DestRawPos := GetRawPos(PaddingX, CntY + PaddingY, 0); + Move(Original.FData[SourceRawPos], Self.FData[DestRawPos], RowSize); + end; +end; + procedure TVolume.CopyCropping(Original: TVolume; StartX, StartY, pSizeX, pSizeY: integer); var @@ -4298,36 +4996,116 @@ procedure TVolume.CopyResizing(Original: TVolume; NewSizeX, NewSizeY: integer); MoveSizeBytes: integer; RawPostDest, RawPosSource: integer; begin - if (NewSizeX=Original.SizeX) and (NewSizeY=Original.SizeY) then + if (NewSizeX=Original.SizeX) and (NewSizeY=Original.SizeY) then + begin + Copy(Original); + end + else + begin + ReSize(NewSizeX, NewSizeY, Original.Depth); + RatioX := NewSizeX / Original.SizeX; + RatioY := NewSizeY / Original.SizeY; + + MaxX := SizeX - 1; + MaxY := SizeY - 1; + OrigMaxX := Original.SizeX - 1; + OrigMaxY := 
Original.SizeY - 1; + MoveSizeBytes := Depth * SizeOf(T); + + for CntX := 0 to MaxX do + begin + OrigPosX := Min(OrigMaxX, Round(CntX / RatioX)); + for CntY := 0 to MaxY do + begin + OrigPosY := Min(OrigMaxY, Round(CntY / RatioY)); + RawPostDest := GetRawPos(CntX, CntY); + RawPosSource := Original.GetRawPos(OrigPosX, OrigPosY); + Move(Original.FData[RawPosSource], FData[RawPostDest], MoveSizeBytes); + end; + end; + end; +end; + +procedure TVolume.CopyTransposingXD(Original: TVolume); +var + CntX, CntY, CntD: integer; + MaxX, MaxY, MaxD: integer; +begin + ReSize(Original.Depth, Original.SizeY, Original.SizeX); + MaxX := FSizeX - 1; + MaxY := FSizeY - 1; + MaxD := FDepth - 1; + if MaxY > 0 then + begin + for CntX := 0 to MaxX do + begin + for CntY := 0 to MaxY do + begin + for CntD := 0 to MaxD do + begin + Self[CntX, CntY, CntD] := Original[CntD, CntY, CntX]; + end; + end; + end; + end + else + begin + for CntX := 0 to MaxX do + begin + for CntD := 0 to MaxD do + begin + Self[CntX, 0, CntD] := Original[CntD, 0, CntX]; + end; + end; + end; +end; + +procedure TVolume.CopyTransposingYD(Original: TVolume); +var + CntX, CntY, CntD: integer; + MaxX, MaxY, MaxD: integer; +begin + ReSize(Original.SizeX, Original.Depth, Original.SizeY); + MaxX := FSizeX - 1; + MaxY := FSizeY - 1; + MaxD := FDepth - 1; + if MaxX > 0 then begin - Copy(Original); + for CntX := 0 to MaxX do + begin + for CntY := 0 to MaxY do + begin + for CntD := 0 to MaxD do + begin + Self[CntX, CntY, CntD] := Original[CntX, CntD, CntY]; + end; + end; + end; end else begin - ReSize(NewSizeX, NewSizeY, Original.Depth); - RatioX := NewSizeX / Original.SizeX; - RatioY := NewSizeY / Original.SizeY; - - MaxX := SizeX - 1; - MaxY := SizeY - 1; - OrigMaxX := Original.SizeX - 1; - OrigMaxY := Original.SizeY - 1; - MoveSizeBytes := Depth * SizeOf(T); - - for CntX := 0 to MaxX do + for CntY := 0 to MaxY do begin - OrigPosX := Min(OrigMaxX, Round(CntX / RatioX)); - for CntY := 0 to MaxY do + for CntD := 0 to MaxD do begin 
- OrigPosY := Min(OrigMaxY, Round(CntY / RatioY)); - RawPostDest := GetRawPos(CntX, CntY); - RawPosSource := Original.GetRawPos(OrigPosX, OrigPosY); - Move(Original.FData[RawPosSource], FData[RawPostDest], MoveSizeBytes); + Self[0, CntY, CntD] := Original[0, CntD, CntY]; end; end; end; end; +procedure TVolume.CopyTransposingAs2D(Original: TVolume); +var + OriginalSizeX, OriginalSizeY, OriginalDepth: integer; +begin + OriginalSizeX := Original.SizeX; + OriginalSizeY := Original.SizeY; + OriginalDepth := Original.Depth; + Original.ReSize(OriginalSizeX*OriginalSizeY, 1, OriginalDepth); + CopyTransposingXD(Original); + Original.ReSize(OriginalSizeX, OriginalSizeY, OriginalDepth); +end; + function TVolume.DotProduct(Original: TVolume): T; begin {$IFDEF Debug} @@ -4781,6 +5559,29 @@ function TVolume.GetMagnitude(): T; Result := Sqrt( Aux ); end; +function TVolume.GetEntropy: T; +var + I, vHigh: integer; + vSum: TNeuralFloat; +begin + vSum := 0; + if FSize > 0 then + begin + vHigh := FSize - 1; + for I := 0 to vHigh do + begin + if FData[I] > 0 then // To avoid log(0) which is undefined + vSum := vSum + (FData[i] * log2(FData[i])); + end; + end; + Result := -vSum; +end; + +function TVolume.GetPerplexity: T; +begin + Result := Power(2, GetEntropy()); +end; + procedure TVolume.FlipX(); var iFrom, iTo: integer; @@ -4855,7 +5656,7 @@ procedure TVolume.ClearTag(); function TVolume.NeuralToStr(V: TNeuralFloat): string; begin - Result := FloatToStr(V, FFormatSettings); + Result := FloatToStr(V, locDataFmtSet); end; procedure TVolume.LoadNonZeroPosIntoTIntegerList(Ints: TIntegerList; @@ -5125,31 +5926,289 @@ function TVolume.GetClass(): integer; end; end; +function TVolume.GetClassOnPixel(X, Y: integer): integer; +var + I: integer; + vHigh: integer; + vMax: T; + Pos: integer; + Value: T; +begin + vHigh := Depth; + if (vHigh>0) then + begin + Result := 0; + Pos := GetRawPos(X, Y); + vMax := FData[Pos]; + for I := 1 to vHigh do + begin + Inc(Pos); + Value := FData[Pos]; + if Value 
> vMax then + begin + Result := I; + vMax := Value; + end; + end; + end else + begin + Result := -1; + end; +end; + function TVolume.SoftMax(): T; var I: integer; vHigh: integer; LocalValue: T; TotalSum: TNeuralFloat; - MaxValue: T; + MinValue, MaxValue: T; begin MaxValue := GetMax(); + if MaxValue <> 0 then Sub(MaxValue); + MinValue := GetMin(); + TotalSum := 0; - vHigh := High(FData); - for I := 0 to vHigh do + // forces range [-1000,0] + if MinValue <> 0 then + begin + if MinValue < -1000 then Mul( -1000/MinValue ); + vHigh := High(FData); + + for I := 0 to vHigh do + begin + // LocalValue := Exp( NeuronForceRange(FData[I] - MaxValue, 4000) ); + LocalValue := Exp( FData[I] ); + FData[I] := LocalValue; + TotalSum := TotalSum + FData[I]; + end; + + if TotalSum > 0 then + begin + Divi(TotalSum); + end; + end; + + Result := TotalSum; +end; + +procedure TVolume.PointwiseSoftMax; +var + I, StartPointPos: integer; + MaxX, MaxY, MaxD: integer; + CountX, CountY, CountD: integer; + MaxValue: T; + LocalValue: T; + TotalSum: TNeuralFloat; +begin + // TODO: This portion of code can be optimized + MaxX := FSizeX - 1; + MaxY := FSizeY - 1; + MaxD := FDepth - 1; + + if MaxD > 0 then begin - LocalValue := Exp( NeuronForceRange(FData[I] - MaxValue, 4000) ); - FData[I] := LocalValue; - TotalSum := TotalSum + FData[I]; + for CountX := 0 to MaxX do + begin + for CountY := 0 to MaxY do + begin + StartPointPos := GetRawPos(CountX, CountY); + I := StartPointPos; + // Find the point max value. 
+ MaxValue := FData[I]; + for CountD := 1 to MaxD do + begin + Inc(I); + if FData[I] > MaxValue + then MaxValue := FData[I]; + end; + TotalSum := 0; + I := StartPointPos; + for CountD := 0 to MaxD do + begin + LocalValue := Exp( NeuronForceRange(FData[I] - MaxValue, 4000) ); + FData[I] := LocalValue; + TotalSum := TotalSum + LocalValue; + Inc(I); + end; + if TotalSum > 0 then + begin + I := StartPointPos; + for CountD := 0 to MaxD do + begin + FData[I] := FData[I] / TotalSum; + Inc(I); + end; + end; + end; + end; end; +end; - if TotalSum > 0 then +procedure TVolume.OneHotEncoding(aTokens: array of integer); +var + CntToken, MaxToken, Token: integer; +begin + MaxToken := Length(aTokens) - 1; + Self.Fill(0); + if MaxToken < SizeX then + begin + for CntToken := 0 to MaxToken do + begin + Token := aTokens[CntToken]; + if Token < FDepth then + begin + Self[CntToken, 0, Token] := 1; + end + else + begin + WriteLn('Token '+IntToStr(Token)+' is bigger than Depth '+IntToStr(FDepth)+' at OneHotEncoding.'); + end; + end; + end + else begin - Divi(TotalSum); + WriteLn('Token length '+IntToStr(MaxToken + 1)+' is bigger than Size X '+IntToStr(SizeX)+' at OneHotEncoding.'); end; +end; - Result := TotalSum; +procedure TVolume.OneHotEncoding(aTokens: string); +var + CntToken, MaxToken, Token: integer; +begin + MaxToken := Length(aTokens); + Self.Fill(0); + if MaxToken <= SizeX then + begin + for CntToken := 1 to MaxToken do + begin + Token := Ord(aTokens[CntToken]); + if Token < FDepth then + begin + Self[CntToken-1, 0, Token] := 1; + end + end; + end + else + begin + WriteLn('Token length '+IntToStr(MaxToken + 1)+' is bigger than Size X '+IntToStr(SizeX)+' at OneHotEncodingReversed.'); + end; +end; + +function GetLastChars(const InputStr: string; LenStr: Integer): string; +begin + if Length(InputStr) > LenStr then + Result := Copy(InputStr, Length(InputStr) - LenStr + 1, LenStr) + else + Result := InputStr; +end; + +procedure TVolume.OneHotEncodingReversed(aTokens: string); +var + 
CntToken, MaxToken, Token: integer; + LocalTokens: string; +begin + MaxToken := Length(aTokens); + if MaxToken > SizeX then + begin + LocalTokens := GetLastChars(aTokens, SizeX); + MaxToken := Length(aTokens); + end + else + begin + LocalTokens := aTokens; + end; + Self.Fill(0); + if MaxToken > 0 then + begin + {$IFDEF DEBUG} + if Ord(LocalTokens[MaxToken]) < 2 then + begin + WriteLn('A string for prediction should not end with terminal symbol.'); + end; + if Ord(LocalTokens[1]) < 2 then + begin + WriteLn('A string for prediction should not start with terminal symbol.'); + end; + {$ENDIF} + if MaxToken <= SizeX then + begin + for CntToken := 1 to MaxToken do + begin + Token := Ord(LocalTokens[CntToken]); + if Token < FDepth then + begin + Self[MaxToken-CntToken, 0, Token] := 1; + end; + end; + end + else + begin + WriteLn('This should never happend. Token length '+IntToStr(MaxToken)+' is bigger than Size X '+IntToStr(SizeX)+' at OneHotEncodingReversed.'); + end; + end + else + begin + {$IFDEF DEBUG} + WriteLn('Zero len at OneHotEncodingReversed'); + {$ENDIF} + end; +end; + +procedure TVolume.OneHotEncodingReversed(var aTokens: array of integer); +var + CntToken, MaxToken, Token: integer; +begin + MaxToken := Length(aTokens) - 1; + Self.Fill(0); + if MaxToken < SizeX then + begin + for CntToken := 0 to MaxToken do + begin + Token := aTokens[CntToken]; + if Token < FDepth then + begin + Self[MaxToken-CntToken, 0, Token] := 1; + end + else + begin + WriteLn('Token '+IntToStr(Token)+' is bigger than Depth '+IntToStr(FDepth)+' at OneHotEncoding.'); + end; + end; + end + else + begin + WriteLn('Token length '+IntToStr(MaxToken + 1)+' is bigger than Size X '+IntToStr(SizeX)+' at OneHotEncoding.'); + end; +end; + +procedure TVolume.PositionalEncoding(n: integer); +var + Position: Integer; + divTerm: Double; + MaxX, MaxY, MaxDepth: integer; + CntX, CntY, CntDepth: integer; + EmbeddingSize: integer; +begin + EmbeddingSize := FDepth; + MaxX := FSizeX - 1; + MaxY := FSizeY - 
1; + MaxDepth := FDepth - 1; + for CntDepth := 0 to MaxDepth do + begin + divTerm := Power(n, (2 * (CntDepth div 2)) / EmbeddingSize); + for CntX := 0 to MaxX do + begin + for CntY := 0 to MaxY do + begin + Position := CntY*FSizeX + CntX; + if CntDepth mod 2 = 0 + then Self[CntX, CntY, CntDepth] := Sin(Position / divTerm) + else Self[CntX, CntY, CntDepth] := Cos(Position / divTerm); + end; + end; + end; end; procedure TVolume.RgbToHsv(); @@ -5166,7 +6225,7 @@ procedure TVolume.RgbToHsv(); if Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5195,7 +6254,7 @@ procedure TVolume.HsvToRgb(); if Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5224,7 +6283,7 @@ procedure TVolume.RgbToHsl(); if Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5252,7 +6311,7 @@ procedure TVolume.HslToRgb(); if Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5280,7 +6339,7 @@ procedure TVolume.RgbToLab(); if Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5309,7 +6368,7 @@ procedure TVolume.LabToRgb(); if Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5333,7 +6392,7 @@ procedure TVolume.RgbToGray(); if Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5357,7 +6416,7 @@ procedure TVolume.GetGrayFromRgb(Rgb: TVolume); if Rgb.Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5563,17 +6622,14 @@ procedure TVolume.InitSELU(Value: T); function TVolume.SaveToString(): string; var - S: TStringList; + S: TNNetStringList; I: integer; version: integer; 
AuxFloat: Single; begin version := 1; - S := TStringList.Create; - S.Sorted := false; - S.Delimiter := ';'; - S.StrictDelimiter := true; - + S := CreateTokenizedStringList(';'); + S.SetCapacity(FSize+10); S.Add( IntToStr(version) ); S.Add( IntToStr(FSizeX) ); S.Add( IntToStr(FSizeY) ); @@ -5582,10 +6638,11 @@ function TVolume.SaveToString(): string; for I := Low(FData) to High(FData) do begin AuxFloat := FData[I]; - S.Add( FloatToStr(AuxFloat, FFormatSettings) ); + S.Add( FloatToStr(AuxFloat, locDataFmtSet) ); end; - Result := S.DelimitedText; + Result := S.GetDelimitedTextFast(); + //Result := S.DelimitedText; S.Free; end; @@ -5628,7 +6685,7 @@ procedure TVolume.LoadFromString(strData: string); begin for I := 4 to S.Count-1 do begin - AuxFloat := StrToFloat(S[I], FFormatSettings); + AuxFloat := StrToFloat(S[I], locDataFmtSet); FData[I-4] := AuxFloat; end; end; @@ -5759,6 +6816,22 @@ procedure TNNetVolume.CalculateLocalResponseFromDepth(Original: TNNetVolume; SqrElements.Free; end; +procedure TNNetVolume.GetTokenArray(var TokenArray: TNNetTokenArray); +var + I, vHigh: integer; +begin + if (Length(TokenArray) <> FSize) then SetLength(TokenArray, FSize); + if FSize > 0 then + begin + vHigh := FSize - 1; + for I := 0 to vHigh do + begin + TokenArray[I].Token:=I; + TokenArray[I].Score:=FData[I]; + end; + end; +end; + procedure TNNetVolume.InterleavedDotProduct(InterleavedAs, B: TNNetVolume); var @@ -5811,7 +6884,31 @@ procedure TNNetVolume.InterleavedDotProduct(InterleavedAs, Bs: TNNetVolume; Inc(CntBVectorSizePlusCntBPos); end; end; +end; + +procedure TNNetVolume.DotProductsPointwise(VAs, VBs: TNNetVolume); +var + VAsCount, VBsCount: integer; +begin + VAsCount := VAs.SizeX * VAs.SizeY; + VBsCount := VBs.SizeX * VBs.SizeY; + if (VAsCount*VBsCount <> FSize) then + begin + Resize(VBsCount, 1, VAsCount); + end; + if (VAs.Depth = VBs.Depth) then + begin + DotProducts(VAsCount, VBsCount, VAs.Depth, VAs, VBs); + end + else + begin + WriteLn( + 
'TNNetVolume.DotProductsPointwise - Depths differ '+ + IntToStr(VAs.Depth) + ' ' + + IntToStr(VBs.Depth) + '.' + ); + end; end; procedure TNNetVolume.DotProducts(NumAs, NumBs, VectorSize: integer; VAs, VBs: TNNetVolume); @@ -6655,7 +7752,14 @@ procedure TNNetVolume.AddArea(DestX, DestY, OriginX, OriginY, LenX, PtrB := Original.GetRawPtr(OriginX, OriginY+CntY); Add(PtrA, PtrB, SizeXDepth); end; - end; + end + {$IFDEF Debug} + else + begin + WriteLn('Error at TNNetVolume.AddArea: depth size doesn''t match. ', + Self.Depth, ' ',Original.Depth); + end + {$ENDIF}; end; // ########################################### @@ -9866,6 +10970,31 @@ procedure TNNetVolume.CopyPadding(Original: TNNetVolume; Padding: integer); end; end; +procedure TNNetVolume.CopyPadding(Original: TNNetVolume; PaddingX, PaddingY: integer + ); +var + CntY: integer; + NewSizeX, NewSizeY: integer; + MaxY: integer; + RowSize: integer; + SourceRawPos, DestRawPos: pointer; +begin + NewSizeX := Original.SizeX + PaddingX * 2; + NewSizeY := Original.SizeY + PaddingY * 2; + MaxY := Original.SizeY - 1; + RowSize := Original.SizeX * Original.Depth; + + Resize(NewSizeX, NewSizeY, Original.Depth); + Fill(0); + + for CntY := 0 to MaxY do + begin + SourceRawPos := Original.GetRawPtr(0, CntY, 0); + DestRawPos := GetRawPtr(PaddingX, CntY + PaddingY, 0); + asm_dword_copy; + end; +end; + procedure TNNetVolume.CopyNoChecks(Original: TNNetVolume); var SourceRawPos, DestRawPos: pointer; @@ -10002,4 +11131,15 @@ initialization locAVX := IsAVXPresent; locAVX2 := IsFMAPresent; locAVX512 := IsAVX512Present; + + {$IF DEFINED(FPC)} + locFmtSet := DefaultFormatSettings; + {$ELSE} + {$IF (CompilerVersion <= 21)} + GetLocaleFormatSettings(0, locDataFmtSet); + {$ELSE} + locFmtSet := TFormatSettings.Create; + {$IFEND} + {$IFEND} + locDataFmtSet.DecimalSeparator := '.'; end. 
diff --git a/neural/neuralvolumev.pas b/neural/neuralvolumev.pas index a2f712b2..4b1a130c 100644 --- a/neural/neuralvolumev.pas +++ b/neural/neuralvolumev.pas @@ -26,7 +26,7 @@ interface uses Classes, SysUtils, neuralvolume, {$IFDEF FPC}ExtCtrls, Graphics, LCLType, FPImage - {$ELSE} Windows, {$IF CompilerVersion >= 23} VCL.ExtCtrls, VCL.Graphics {$ELSE} ExtCtrls, Graphics {$ENDIF} {$ENDIF}; + {$ELSE} Windows, {$IF CompilerVersion >= 23} VCL.ExtCtrls, VCL.Graphics {$ELSE} ExtCtrls, Graphics {$IFEND} {$ENDIF}; /// saves a bitmap into a file from a handle HWND procedure SaveHandleToBitmap(OutputFileName: string; hWnd: HWND); From 3e7cde6a62e7c4d9b45b5210bb6ada1f1750f51a Mon Sep 17 00:00:00 2001 From: Michael Rabatscher Date: Mon, 29 Apr 2024 11:10:37 +0200 Subject: [PATCH 10/13] Added missing fields in the assign method --- neural/neuralnetwork.pas | 3 +++ 1 file changed, 3 insertions(+) diff --git a/neural/neuralnetwork.pas b/neural/neuralnetwork.pas index 78da221b..b4f23b41 100644 --- a/neural/neuralnetwork.pas +++ b/neural/neuralnetwork.pas @@ -15350,11 +15350,14 @@ procedure TNNetNeuron.Assign(neuron: TNNetNeuron); begin FWeights.Copy( neuron.fWeights ); FBackInertia.Copy(neuron.fBackInertia); + FBackInertia2.Copy(neuron.FBackInertia2); FDelta.Copy(neuron.FDelta); + FDelta2.Copy(neuron.FDelta2); FBiasWeight := neuron.fBiasWeight; FBiasInertia := neuron.FBiasInertia; FBiasDelta := neuron.FBiasDelta; + FBiasInertia2 := neuron.FBiasInertia2; end; constructor TEasyBytePredictionViaNNet.Create(pActionByteLen, From d9ae8eaa6cb11cc11c67f16392478ced1898c835 Mon Sep 17 00:00:00 2001 From: Rabatscher Michael Date: Mon, 27 May 2024 12:32:09 +0200 Subject: [PATCH 11/13] * First version of a Delphi AVX muladd * Updated SelfTest -> avx tests added * Updated code to reference * Removed format settings from constantly beeing created in a base class to one single local instance in neuralvolume --- README.md | 104 +- examples/Hypotenuse/README.md | 3 + 
examples/IdentityShortcutConnection/README.md | 6 +- examples/OnlyOneNeuron/OnlyOneNeuron2x3y4.lpi | 165 + examples/OnlyOneNeuron/OnlyOneNeuron2x3y4.lpr | 91 + .../OnlyOneNeuronOrOperation.lpi | 165 + .../OnlyOneNeuronOrOperation.lpr | 126 + examples/OnlyOneNeuron/README.md | 182 + examples/ResNet/CaiResNet20.lpr | 2 +- examples/ResNet/ResNet20.lpr | 2 +- examples/SelfTest/SelfTest.dpr | 56 +- .../SimpleFashionMNIST/SimpleFashionMNIST.lpr | 62 +- examples/SimpleImageClassifier/README.md | 80 +- .../SimpleImageClassifierGroupedConv.lpr | 16 +- .../SimpleImageClassifierPaddingCropping.lpr | 2 +- ...pleImageClassifierPaddingCroppingSwish.lpr | 2 +- .../SimpleImageClassifierReLU6.lpr | 2 +- .../SimpleImageClassifierResize48.lpr | 2 +- .../SimpleImageClassifierResize64.lpr | 2 +- .../SimpleImageClassifierSwish.lpr | 2 +- examples/SimpleMNist/SimpleMNist.lpr | 37 +- .../NLP_CAI_TinyStories_Simple_Example.ipynb | 6066 +++++++++++++ examples/SimpleNLP/README.md | 146 + examples/SimpleNLP/SimpleNLP.lpi | 170 + examples/SimpleNLP/SimpleNLP.lpr | 209 + .../SimplePlantLeafDisease.ipynb | 7895 +---------------- examples/StringManipulation/README.md | 63 + .../StringManipulation/StringManipulation.lpi | 169 + .../StringManipulation/StringManipulation.lpr | 158 + examples/SuperResolution/README.md | 4 +- .../uvisualautoencodertinyimagenet.lfm | 2 +- .../sentimentAnalysis/sentimentAnalysis.lpi | 169 + .../sentimentAnalysis/sentimentAnalysis.lpr | 284 + neural/Neural.AVX.pas | 112 - neural/Neural.AVXx64.pas | 120 - neural/NeuralAVX.pas | 222 + neural/NeuralAVXx64.pas | 235 + neural/neuraldatasets.pas | 196 +- neural/neuralfit.pas | 71 +- neural/neuralnetwork.pas | 2582 +++++- neural/neuralthread.pas | 23 +- neural/neuralvolume.pas | 1380 ++- neural/neuralvolumev.pas | 2 +- 43 files changed, 13095 insertions(+), 8292 deletions(-) create mode 100644 examples/OnlyOneNeuron/OnlyOneNeuron2x3y4.lpi create mode 100644 examples/OnlyOneNeuron/OnlyOneNeuron2x3y4.lpr create mode 100644 
examples/OnlyOneNeuron/OnlyOneNeuronOrOperation.lpi create mode 100644 examples/OnlyOneNeuron/OnlyOneNeuronOrOperation.lpr create mode 100644 examples/OnlyOneNeuron/README.md create mode 100644 examples/SimpleNLP/NLP_CAI_TinyStories_Simple_Example.ipynb create mode 100644 examples/SimpleNLP/README.md create mode 100644 examples/SimpleNLP/SimpleNLP.lpi create mode 100644 examples/SimpleNLP/SimpleNLP.lpr create mode 100644 examples/StringManipulation/README.md create mode 100644 examples/StringManipulation/StringManipulation.lpi create mode 100644 examples/StringManipulation/StringManipulation.lpr create mode 100644 examples/sentimentAnalysis/sentimentAnalysis.lpi create mode 100644 examples/sentimentAnalysis/sentimentAnalysis.lpr delete mode 100644 neural/Neural.AVX.pas delete mode 100644 neural/Neural.AVXx64.pas create mode 100644 neural/NeuralAVX.pas create mode 100644 neural/NeuralAVXx64.pas diff --git a/README.md b/README.md index 25cdba8a..da667637 100644 --- a/README.md +++ b/README.md @@ -5,9 +5,15 @@ OpenCL capable devices including AMD, Intel and NVIDIA. This API has been tested This project is a subproject from a bigger and older project called [CAI](https://sourceforge.net/projects/cai/) and is sister to Keras based [K-CAI NEURAL API](https://github.com/joaopauloschuler/k-neural-api). You can find trained neural network models in the [pre-trained-neural-api-networks](https://github.com/joaopauloschuler/pre-trained-neural-api-networks/) repository. +## Intro Videos +[![Watch the video](https://img.youtube.com/vi/aIy1S7clhQo/0.jpg)](https://youtu.be/aIy1S7clhQo) | [![Watch the video](https://img.youtube.com/vi/q56NcgUiAAk/0.jpg)](https://youtu.be/q56NcgUiAAk) | [![Watch the video](https://img.youtube.com/vi/PdNTgI_qSyo/0.jpg)](https://youtu.be/PdNTgI_qSyo) +--------------------------- | ------------------------------------- | ------------------------- +Basics of Neural Networks in Pascal - Loading and Saving | Neural Networks for Absolute Beginners! 
Learning a Simple Function | Coding a Neural Network in Pascal that Learns to Calculate the Hypotenuse + ## Why Pascal? -* Compiled pascal code is super fast! This API can outperform some major APIs in some architectures. -* Pascal is easy to learn and easy to make a readable and understandable source code. You'll be able to make super fast **native** code and at the same time have a readable code. +* The Pascal computer language is easy to learn. Pascal allows developers to make a readable and understandable source code. +* You'll be able to make super-fast **native code** and at the same time have a readable code. +* This API can outperform some major APIs in some architectures. ## Prerequisites You'll need [Lazarus](https://www.lazarus-ide.org/) development environment. If you have an OpenCL capable device, you'll need its OpenCL drivers. Many examples use the [CIFAR-10](https://www.cs.toronto.edu/~kriz/cifar.html) dataset. You'll also find examples for the [CIFAR-100](https://www.cs.toronto.edu/~kriz/cifar.html), [MNIST](http://yann.lecun.com/exdb/mnist/), [Fashion MNIST](https://www.kaggle.com/zalando-research/fashionmnist) and the [Places365-Standard Small images 256x256](http://places2.csail.mit.edu/download.html) dataset. @@ -17,6 +23,11 @@ This project is [Lazarus](https://www.lazarus-ide.org/) based. That said, as of ## Installation Clone this project, add the [**neural**](https://github.com/joaopauloschuler/neural-api/tree/master/neural) folder to your [Lazarus](https://www.lazarus-ide.org/) unit search path and you'll be ready to go! + +## A.I. Powered Support +You can get A.I. powered help from these tools: +* [CAI Neural API support at ChatGPT4](https://chat.openai.com/g/g-bqMxEDpIg-neural-api-free-pascal-developer). +* [CAI Neural API support at Poe](https://poe.com/CAI-NEURAL-API). 
## Documentation The documentation is composed by: @@ -34,16 +45,88 @@ In this readme file, you’ll find information about: * Other scientific publications from the same author. ### Easy Examples First Please! -Some recommended introductory source code examples are: +[![Watch the video](https://img.youtube.com/vi/PdNTgI_qSyo/0.jpg)](https://youtu.be/PdNTgI_qSyo) + +You can click on the image above to watch the video. + +Assuming that you would like to train a neural network to learn a function that has 2 inputs and one output, you could start with something like this: +``` + NN.AddLayer([ + TNNetInput.Create(2), + TNNetFullConnectReLU.Create(32), + TNNetFullConnectReLU.Create(32), + TNNetFullConnectLinear.Create(1) + ]); +``` +The example above has 2 inputs (`TNNetInput`), 2 dense layers (`TNNetFullConnectReLU`) with 32 neurons each and one output (`TNNetFullConnectLinear`). + +You can learn more about how to build and train simple neural networks at the following source code examples: +* [Only one neuron](https://github.com/joaopauloschuler/neural-api/tree/master/examples/OnlyOneNeuron). 
* [Training a neural network to learn the hypotenuse function](https://github.com/joaopauloschuler/neural-api/tree/master/examples/Hypotenuse) * [Training a neural network to learn the hypotenuse function with FitLoading](https://github.com/joaopauloschuler/neural-api/tree/master/examples/HypotenuseFitLoading) * [Training a neural network to learn boolean functions AND, OR and XOR with neuralfit unit](https://github.com/joaopauloschuler/neural-api/tree/master/examples/XorAndOr) * [Training a neural network to learn boolean functions AND, OR and XOR without neuralfit unit](https://sourceforge.net/p/cai/svncode/HEAD/tree/trunk/lazarus/experiments/supersimple/supersimple.lpr) +### Loading and Saving Neural Networks +Loading is very easy: +``` + NN := TNNet.Create; + NN.LoadFromFile('MyTrainedNeuralNetwork.nn'); +``` +Saving is as easy: + +``` + NN.SaveToFile('MyTrainedNeuralNetwork.nn'); +``` + +### NLP - Training a Simple Neural Network Model for Text Generation +This [NLP source code example](https://github.com/joaopauloschuler/neural-api/tree/master/examples/SimpleNLP) shows a (hello world) small neural network trained on the [Tiny Stories dataset](https://huggingface.co/datasets/roneneldan/TinyStories). This code + +``` + WriteLn(GenerateStringFromChars(NFit.NN, 'once', FSampler),'.'); + WriteLn(GenerateStringFromChars(NFit.NN, 'one ', FSampler),'.'); +``` + +produces this output: +``` +once upon a time, there was a little girl named lily. she loved to play outside i. +one day, a little girl named lily was playing in her garden. she saw a big car wi. 
+``` + +You can open on colab the raw training file and run it by yourself at: +https://colab.research.google.com/github/joaopauloschuler/neural-api/blob/master/examples/SimpleNLP/NLP_CAI_TinyStories_Simple_Example.ipynb + +#### Creating Your Own Chat Bot +Once your neural network is trained, you can run your own chat bot with: +``` +var + S: string; + oSampler: TNNetSamplerBase; + NN: TNNet; +begin + oSampler := TNNetSamplerTopP.Create(0.6); + NN := TNNet.Create(); + WriteLn('Loading neural network.'); + NN.LoadFromFile('MyNeuralNetwork.nn'); + NN.DebugStructure(); + WriteLn(); + WriteLn('Write something and I will reply.'); + repeat + Write('User: '); + ReadLn(S); + WriteLn('Neural network: ',GenerateStringFromChars(NN, LowerCase(S), oSampler),'.'); + until S = 'exit'; + NN.Free; + oSampler.Free; +end; +``` + ### Simple Image Classification Examples -#### How Does the Code Look like for an Image Classification (CIFAR-10) Example? -This is an example for image classification: +#### CIFAR-10 Image Classification Example +The CIFAR-10 dataset is a well-known collection of images commonly used to train machine learning and computer vision algorithms. It was created by the Canadian Institute for Advanced Research (CIFAR). It contains 60K 32x32 color images. The images are classified into 10 different classes, with 6,000 images per class. The classes represent airplanes, cars, birds, cats, deer, dogs, frogs, horses, ships, and trucks. Despite its relatively low resolution and small size, CIFAR-10 can be challenging for models to achieve high accuracy, making it a good dataset for testing advancements in machine learning techniques. 
+ +Follows a source code example for the CIFAR-10 image classification: ``` NN := TNNet.Create(); NN.AddLayer([ @@ -101,10 +184,13 @@ You can save and load trained models (neural networks) with `TNNet.SaveToFile` a ``` ### Youtube Videos -There are some available videos: -* [Increasing Image Resolution with Neural Networks](https://www.youtube.com/watch?v=jdFixaZ2P4w) -* [Ultra Fast Single Precision Floating Point Computing](https://www.youtube.com/watch?v=qGnfwpKUTIQ) -* [AVX and AVX2 Code Optimization](https://www.youtube.com/watch?v=Pnv174V_emw) +[![Watch the video](https://img.youtube.com/vi/aIy1S7clhQo/0.jpg)](https://youtu.be/aIy1S7clhQo) | [![Watch the video](https://img.youtube.com/vi/q56NcgUiAAk/0.jpg)](https://youtu.be/q56NcgUiAAk) | [![Watch the video](https://img.youtube.com/vi/PdNTgI_qSyo/0.jpg)](https://youtu.be/PdNTgI_qSyo) +--------------------------- | ------------------------------------- | ------------------------- +Basics of Neural Networks in Pascal - Loading and Saving | Neural Networks for Absolute Beginners! 
Learning a Simple Function | Coding a Neural Network in Pascal that Learns to Calculate the Hypotenuse +[![Watch the video](https://img.youtube.com/vi/tODsv6Ks2DM/0.jpg)](https://youtu.be/tODsv6Ks2DM) | [![Watch the video](https://img.youtube.com/vi/f4T9IB-He_k/0.jpg)](https://youtu.be/f4T9IB-He_k) | [![Watch the video](https://img.youtube.com/vi/o-8NuoSsdck/0.jpg)](https://youtu.be/o-8NuoSsdck) +Pre-trained Neural Networks & Transfer Learning with Pascal's CAI Neural API | Coding a Neural Network in Pascal that Learns the OR Boolean Operation | A Dive into Identity Shortcut Connection - The ResNet building block +[![Watch the video](https://img.youtube.com/vi/SEvWB7k8uy0/0.jpg)](https://youtu.be/SEvWB7k8uy0) | [![Watch the video](https://img.youtube.com/vi/3QwIaAsDmJw/0.jpg)](https://youtu.be/3QwIaAsDmJw) | [![Watch the video](https://img.youtube.com/vi/VH6v3D5cxxs/0.jpg)](https://youtu.be/VH6v3D5cxxs) +Increasing Image Resolution with Neural Networks | Ultra Fast Single Precision Floating Point Computing | AVX and AVX2 Code Optimization Some videos make referrence to **uvolume** unit. The current **neuralvolume** unit used to be called **uvolume**. This is why it's mentioned. 
diff --git a/examples/Hypotenuse/README.md b/examples/Hypotenuse/README.md index 60575f86..4d2d168c 100644 --- a/examples/Hypotenuse/README.md +++ b/examples/Hypotenuse/README.md @@ -1,5 +1,8 @@ # Learning Hypotenuse Function +## 2 Minutes Intro Video +[![Watch the video](https://img.youtube.com/vi/PdNTgI_qSyo/0.jpg)](https://youtu.be/PdNTgI_qSyo) + This example has these main steps: * Preparing training data * Creating the neural network diff --git a/examples/IdentityShortcutConnection/README.md b/examples/IdentityShortcutConnection/README.md index d4042b04..3fc5b272 100644 --- a/examples/IdentityShortcutConnection/README.md +++ b/examples/IdentityShortcutConnection/README.md @@ -1,10 +1,10 @@ # Identity Shortcut Connection -The **identity shortcut connection** is a connection that skips few layers and then is summed to the output of a following +The **identity shortcut connection** is a connection that skips few layers (usually 2 layers) and then is summed with the output of a following layer. You can find more about it in the paper [Deep Residual Learning for Image Recognition](https://arxiv.org/abs/1512.03385) and [here](https://towardsdatascience.com/an-overview-of-resnet-and-its-variants-5281e2f56035). -The main point of attention is the **summation** of outputs. In CAI, this is done via the `TNNetSum` class. This class gets an array -of layers as an input and sums all inputs. For this summation to work, the shape of each input must be the same otherwise you'll +The main point of attention is the **summation** of outputs. In CAI, this is done via the `TNNetSum` class. `TNNetSum` sums +an array of input layers. For this summation to work, the shape of each input must be the same otherwise you'll get a run time error. 
The current example shows this: ``` GlueLayer := NN.AddLayer(TNNetReLU.Create()); diff --git a/examples/OnlyOneNeuron/OnlyOneNeuron2x3y4.lpi b/examples/OnlyOneNeuron/OnlyOneNeuron2x3y4.lpi new file mode 100644 index 00000000..9ac174b5 --- /dev/null +++ b/examples/OnlyOneNeuron/OnlyOneNeuron2x3y4.lpi @@ -0,0 +1,165 @@ + + + + + + + + + + + + + <UseAppBundle Value="False"/> + <ResourceType Value="res"/> + </General> + <i18n> + <EnableI18N LFM="False"/> + </i18n> + <BuildModes Count="3"> + <Item1 Name="Default" Default="True"/> + <Item2 Name="Debug"> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\OnlyOneNeuron2x3y4"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\neural;$(ProjOutDir)"/> + <OtherUnitFiles Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <Parsing> + <SyntaxOptions> + <IncludeAssertionCode Value="True"/> + </SyntaxOptions> + </Parsing> + <CodeGeneration> + <Checks> + <IOChecks Value="True"/> + <RangeChecks Value="True"/> + <OverflowChecks Value="True"/> + <StackChecks Value="True"/> + </Checks> + <VerifyObjMethodCallValidity Value="True"/> + </CodeGeneration> + <Linking> + <Debugging> + <DebugInfoType Value="dsDwarf2Set"/> + <UseValgrind Value="True"/> + <UseExternalDbgSyms Value="True"/> + </Debugging> + </Linking> + <Other> + <CustomOptions Value="-dDebug +-dAVX"/> + <OtherDefines Count="2"> + <Define0 Value="Debug"/> + <Define1 Value="AVX"/> + </OtherDefines> + </Other> + </CompilerOptions> + </Item2> + <Item3 Name="Release"> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\OnlyOneNeuron2x3y4"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\..\neural;$(ProjOutDir)"/> + 
<OtherUnitFiles Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <CodeGeneration> + <SmartLinkUnit Value="True"/> + <Optimizations> + <OptimizationLevel Value="3"/> + </Optimizations> + </CodeGeneration> + <Linking> + <Debugging> + <GenerateDebugInfo Value="False"/> + </Debugging> + <LinkSmart Value="True"/> + </Linking> + <Other> + <OtherDefines Count="6"> + <Define0 Value="Release"/> + <Define1 Value="Debug"/> + <Define2 Value="CheckRange"/> + <Define3 Value="AVX2"/> + <Define4 Value="AVX"/> + <Define5 Value="OpenCL"/> + </OtherDefines> + </Other> + </CompilerOptions> + </Item3> + </BuildModes> + <PublishOptions> + <Version Value="2"/> + </PublishOptions> + <RunParams> + <FormatVersion Value="2"/> + <Modes Count="1"> + <Mode0 Name="default"/> + </Modes> + </RunParams> + <RequiredPackages Count="1"> + <Item1> + <PackageName Value="multithreadprocslaz"/> + </Item1> + </RequiredPackages> + <Units Count="1"> + <Unit0> + <Filename Value="OnlyOneNeuron2x3y4.lpr"/> + <IsPartOfProject Value="True"/> + </Unit0> + </Units> + </ProjectOptions> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\OnlyOneNeuron2x3y4"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\..\experiments\neural;$(ProjOutDir)"/> + <OtherUnitFiles Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <CodeGeneration> + <Optimizations> + <OptimizationLevel Value="3"/> + </Optimizations> + </CodeGeneration> + <Linking> + <Debugging> + <GenerateDebugInfo Value="False"/> + </Debugging> + </Linking> + <Other> + <CustomOptions Value="-dAVX +-dRelease"/> + <OtherDefines 
Count="2"> + <Define0 Value="AVX"/> + <Define1 Value="Release"/> + </OtherDefines> + </Other> + </CompilerOptions> + <Debugging> + <Exceptions Count="3"> + <Item1> + <Name Value="EAbort"/> + </Item1> + <Item2> + <Name Value="ECodetoolError"/> + </Item2> + <Item3> + <Name Value="EFOpenError"/> + </Item3> + </Exceptions> + </Debugging> +</CONFIG> diff --git a/examples/OnlyOneNeuron/OnlyOneNeuron2x3y4.lpr b/examples/OnlyOneNeuron/OnlyOneNeuron2x3y4.lpr new file mode 100644 index 00000000..fdd12750 --- /dev/null +++ b/examples/OnlyOneNeuron/OnlyOneNeuron2x3y4.lpr @@ -0,0 +1,91 @@ +program OnlyOneNeuron2x3y4; +(* +OnlyOneNeuronOrOperation: this free pascal source code trains a neural network +that contains only one neuron to learn the function f(x,y) = 2x - 3y + 4. +Copyright (C) 2023 Joao Paulo Schwarz Schuler + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; either version 2 of the License, or +any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along +with this program; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+*) + + +{$mode objfpc}{$H+} + +uses {$IFDEF UNIX} {$IFDEF UseCThreads} + cthreads, {$ENDIF} {$ENDIF} + Classes, + neuralnetwork, + neuralvolume, + neuralfit; + +type + // Define the input and output types for training data + TBackFloatInput = array[0..1] of TNeuralFloat; // Input data for 2x - 3y + 4 + TBackFloatOutput = array[0..0] of TNeuralFloat; // Expected output for 2x - 3y + 4 + +procedure RunFloatAlgo(); +var + NN: TNNet; + EpochCnt: integer; + pOutPut: TNNetVolume; + vInputs: TBackFloatInput; + vOutput: TBackFloatOutput; +begin + NN := TNNet.Create(); + + // Create the neural network layers + NN.AddLayer(TNNetInput.Create(2)); // Input layer with 2 neurons + NN.AddLayer(TNNetFullConnectLinear.Create(1)); // Single neuron layer connected to both inputs from the previous layer. + + NN.SetLearningRate(0.0001, 0); // Set the learning rate and momentum + + pOutPut := TNNetVolume.Create(1, 1, 1, 1); // Create a volume to hold the output + + WriteLn; + + for EpochCnt := 1 to 100000 do + begin + vInputs[0] := (Random(10000) - 5000)/100; // Random number in the interval [-50,+50]. + vInputs[1] := (Random(10000) - 5000)/100; // Random number in the interval [-50,+50]. 
+ vOutput[0] := 2*vInputs[0] - 3*vInputs[1] + 4; // 2x - 3y + 4 + // Feed forward and backpropagation + NN.Compute(vInputs); // Perform feedforward computation + NN.GetOutput(pOutPut); // Get the output of the network + NN.Backpropagate(vOutput); // Perform backpropagation to adjust weights + + if EpochCnt mod 5000 = 0 then + WriteLn( + EpochCnt:7, 'x', + ' Output:', pOutPut.Raw[0]:5:2,' ', + ' - Training/Desired Output:', vOutput[0]:5:2,' ' + ); + end; + + NN.DebugWeights(); // Display the final weights of the network + + pOutPut.Free; // Free the memory allocated for output + NN.Free; // Free the memory allocated for the network + + Write('Press ENTER to exit.'); + ReadLn; +end; + +var + // Stops Lazarus errors + Application: record Title:string; end; + +begin + Application.Title:='Only One Neuron - 2x - 3y + 4'; + RunFloatAlgo(); +end. diff --git a/examples/OnlyOneNeuron/OnlyOneNeuronOrOperation.lpi b/examples/OnlyOneNeuron/OnlyOneNeuronOrOperation.lpi new file mode 100644 index 00000000..7c2fa59f --- /dev/null +++ b/examples/OnlyOneNeuron/OnlyOneNeuronOrOperation.lpi @@ -0,0 +1,165 @@ +<?xml version="1.0" encoding="UTF-8"?> +<CONFIG> + <ProjectOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <General> + <Flags> + <MainUnitHasCreateFormStatements Value="False"/> + </Flags> + <SessionStorage Value="InProjectDir"/> + <MainUnit Value="0"/> + <Title Value="Only One Neuron - OR Operation"/> + <UseAppBundle Value="False"/> + <ResourceType Value="res"/> + </General> + <i18n> + <EnableI18N LFM="False"/> + </i18n> + <BuildModes Count="3"> + <Item1 Name="Default" Default="True"/> + <Item2 Name="Debug"> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\OnlyOneNeuronOrOperation"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\neural;$(ProjOutDir)"/> + <OtherUnitFiles 
Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <Parsing> + <SyntaxOptions> + <IncludeAssertionCode Value="True"/> + </SyntaxOptions> + </Parsing> + <CodeGeneration> + <Checks> + <IOChecks Value="True"/> + <RangeChecks Value="True"/> + <OverflowChecks Value="True"/> + <StackChecks Value="True"/> + </Checks> + <VerifyObjMethodCallValidity Value="True"/> + </CodeGeneration> + <Linking> + <Debugging> + <DebugInfoType Value="dsDwarf2Set"/> + <UseValgrind Value="True"/> + <UseExternalDbgSyms Value="True"/> + </Debugging> + </Linking> + <Other> + <CustomOptions Value="-dDebug +-dAVX"/> + <OtherDefines Count="2"> + <Define0 Value="Debug"/> + <Define1 Value="AVX"/> + </OtherDefines> + </Other> + </CompilerOptions> + </Item2> + <Item3 Name="Release"> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\OnlyOneNeuronOrOperation"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\..\neural;$(ProjOutDir)"/> + <OtherUnitFiles Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <CodeGeneration> + <SmartLinkUnit Value="True"/> + <Optimizations> + <OptimizationLevel Value="3"/> + </Optimizations> + </CodeGeneration> + <Linking> + <Debugging> + <GenerateDebugInfo Value="False"/> + </Debugging> + <LinkSmart Value="True"/> + </Linking> + <Other> + <OtherDefines Count="6"> + <Define0 Value="Release"/> + <Define1 Value="Debug"/> + <Define2 Value="CheckRange"/> + <Define3 Value="AVX2"/> + <Define4 Value="AVX"/> + <Define5 Value="OpenCL"/> + </OtherDefines> + </Other> + </CompilerOptions> + </Item3> + </BuildModes> + <PublishOptions> + <Version 
Value="2"/> + </PublishOptions> + <RunParams> + <FormatVersion Value="2"/> + <Modes Count="1"> + <Mode0 Name="default"/> + </Modes> + </RunParams> + <RequiredPackages Count="1"> + <Item1> + <PackageName Value="multithreadprocslaz"/> + </Item1> + </RequiredPackages> + <Units Count="1"> + <Unit0> + <Filename Value="OnlyOneNeuronOrOperation.lpr"/> + <IsPartOfProject Value="True"/> + </Unit0> + </Units> + </ProjectOptions> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\OnlyOneNeuronOrOperation"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\..\experiments\neural;$(ProjOutDir)"/> + <OtherUnitFiles Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <CodeGeneration> + <Optimizations> + <OptimizationLevel Value="3"/> + </Optimizations> + </CodeGeneration> + <Linking> + <Debugging> + <GenerateDebugInfo Value="False"/> + </Debugging> + </Linking> + <Other> + <CustomOptions Value="-dAVX +-dRelease"/> + <OtherDefines Count="2"> + <Define0 Value="AVX"/> + <Define1 Value="Release"/> + </OtherDefines> + </Other> + </CompilerOptions> + <Debugging> + <Exceptions Count="3"> + <Item1> + <Name Value="EAbort"/> + </Item1> + <Item2> + <Name Value="ECodetoolError"/> + </Item2> + <Item3> + <Name Value="EFOpenError"/> + </Item3> + </Exceptions> + </Debugging> +</CONFIG> diff --git a/examples/OnlyOneNeuron/OnlyOneNeuronOrOperation.lpr b/examples/OnlyOneNeuron/OnlyOneNeuronOrOperation.lpr new file mode 100644 index 00000000..c20424d2 --- /dev/null +++ b/examples/OnlyOneNeuron/OnlyOneNeuronOrOperation.lpr @@ -0,0 +1,126 @@ +program OnlyOneNeuronOrOperation; +(* +OnlyOneNeuronOrOperation: this free pascal source code trains a neural network +that contains only one neuron to learn the OR boolean operation. 
+Copyright (C) 2023 Joao Paulo Schwarz Schuler + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; either version 2 of the License, or +any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along +with this program; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +*) + + +{$mode objfpc}{$H+} + +uses {$IFDEF UNIX} {$IFDEF UseCThreads} + cthreads, {$ENDIF} {$ENDIF} + Classes, + neuralnetwork, + neuralvolume, + neuralfit; + +type + // Define the input and output types for training data + TBackInput = array[0..3] of array[0..1] of TNeuralFloat; // Input data for OR operation + TBackOutput = array[0..3] of array[0..0] of TNeuralFloat; // Expected output for OR operation + +const + cs_false = 0.1; // Encoding for "false" value + cs_true = 0.8; // Encoding for "true" value + cs_threshold = (cs_false + cs_true) / 2; // Threshold for False/True values + +const + cs_inputs : TBackInput = + ( + // Input data for OR operation + (cs_false, cs_false), + (cs_false, cs_true), + (cs_true, cs_false), + (cs_true, cs_true) + ); + +const + cs_outputs : TBackOutput = + ( + // Expected outputs for OR operation + (cs_false), + (cs_true), + (cs_true), + (cs_true) + ); + +procedure RunAlgo(); +var + NN: TNNet; + EpochCnt: integer; + Cnt: integer; + pOutPut: TNNetVolume; + vInputs: TBackInput; + vOutput: TBackOutput; +begin + NN := TNNet.Create(); + + // Create the neural network layers + NN.AddLayer(TNNetInput.Create(2)); // Input layer with 2 inputs + NN.AddLayer(TNNetFullConnectLinear.Create(1)); // Single neuron layer connected to both 
inputs from the previous layer. + + NN.SetLearningRate(0.01, 0.9); // Set the learning rate and momentum + + vInputs := cs_inputs; // Assign the input data + vOutput := cs_outputs; // Assign the expected output data + pOutPut := TNNetVolume.Create(1, 1, 1, 1); // Create a volume to hold the computed output + + WriteLn('The value encoding FALSE is: ', cs_false:4:2); // Display the encoding for "false" + WriteLn('The value encoding TRUE is: ', cs_true:4:2); // Display the encoding for "true" + WriteLn('The threshold is: ', cs_threshold:4:2); // Display the threshold value + WriteLn; + + for EpochCnt := 1 to 600 do + begin + for Cnt := Low(cs_inputs) to High(cs_inputs) do + begin + // Feed forward and backpropagation + NN.Compute(vInputs[Cnt]); // Perform feedforward computation + NN.GetOutput(pOutPut); // Get the output of the network + NN.Backpropagate(vOutput[Cnt]); // Perform backpropagation to adjust weights + + if EpochCnt mod 100 = 0 then + WriteLn( + EpochCnt:7, 'x', Cnt, + ' Inputs:', vInputs[Cnt][0]:5:3,' ',vInputs[Cnt][1]:5:3, + ' Computed Output:', pOutPut.Raw[0]:5:2,' ', + ' Desired Output:', vOutput[cnt][0]:5:2 + ); + end; + + if EpochCnt mod 100 = 0 then + WriteLn(); + end; + + NN.DebugWeights(); // Display the final weights of the network + + pOutPut.Free; // Free the memory allocated for output + NN.Free; // Free the memory allocated for the network + + Write('Press ENTER to exit.'); + ReadLn; +end; + +var + // Stops Lazarus errors + Application: record Title:string; end; + +begin + Application.Title:='Only One Neuron - OR Operation'; + RunAlgo(); +end. 
diff --git a/examples/OnlyOneNeuron/README.md b/examples/OnlyOneNeuron/README.md new file mode 100644 index 00000000..d05ab50d --- /dev/null +++ b/examples/OnlyOneNeuron/README.md @@ -0,0 +1,182 @@ +# Only One Neuron - Source Code Examples + +To make the learning of neural networks very easy, this folder contains 2 source code examples with neural networks that contain only one neuron: +* Learn the linear function 2*x - 3*y + 4. +* Learn the boolean OR operation. + +## Learn the linear function 2*x - 3*y + 4 +This example covers: +* data preparation (the training data), +* neural network architecture, +* training and testing. + +### The Training Data +While training, we calculate inputs and expected outputs with: +``` + vInputs[0] := (Random(10000) - 5000)/100; // Random number in the interval [-50,+50]. + vInputs[1] := (Random(10000) - 5000)/100; // Random number in the interval [-50,+50]. + vOutput[0] := 2*vInputs[0] - 3*vInputs[1] + 4; // 2x - 3y + 4 +``` +The actual data structures with input and output are defined with: +``` +type + // Define the input and output types for training data + TBackFloatInput = array[0..1] of TNeuralFloat; // Input data for 2x - 3y + 4 + TBackFloatOutput = array[0..0] of TNeuralFloat; // Expected output for 2x - 3y + 4 +... +var +... + vInputs: TBackFloatInput; + vOutput: TBackFloatOutput; +``` +### Neural Network Architecture +The neural network consists of only 2 layers: +* an input layer with two inputs (representing the two inputs x and y) +* and a single output neuron that provides the result. It uses a fully connected architecture without activation function. + +The above is created with: +``` + NN := TNNet.Create(); + // Create the neural network layers + NN.AddLayer(TNNetInput.Create(2)); // Input layer with 2 neurons + NN.AddLayer(TNNetFullConnectLinear.Create(1)); // Single neuron layer connected to both inputs from the previous layer. 
+``` + +### Training and Testing the Neural Network +The training and testing are done in a single code: +``` + for EpochCnt := 1 to 100000 do + begin + vInputs[0] := (Random(10000) - 5000)/100; // Random number in the interval [-50,+50]. + vInputs[1] := (Random(10000) - 5000)/100; // Random number in the interval [-50,+50]. + vOutput[0] := 2*vInputs[0] - 3*vInputs[1] + 4; // 2x - 3y + 4 + // Feed forward and backpropagation + NN.Compute(vInputs); // Perform feedforward computation + NN.GetOutput(pOutPut); // Get the output of the network + NN.Backpropagate(vOutput); // Perform backpropagation to adjust weights + + if EpochCnt mod 5000 = 0 then + WriteLn( + EpochCnt:7, 'x', + ' Output:', pOutPut.Raw[0]:5:2,' ', + ' - Training/Desired Output:', vOutput[0]:5:2,' ' + ); + end; +``` +In the above code, `vOutput` has the desired output while `pOutPut` has the output that was calculated by the neural network. In the output, you'll find: +``` + 90000x Output:39.17 - Training/Desired Output:39.17 + 95000x Output: 8.12 - Training/Desired Output: 8.12 + 100000x Output:110.15 - Training/Desired Output:110.15 +``` +As you can see above, the desired output is exactly the calculated output. + +## Learn the Boolean OR Operation +This example covers: +* data preparation (the training data), +* neural network architecture, +* training and testing. 
+ +### The Training Data +The training data consists of all possible input combinations for the OR operation and their corresponding outputs: +``` +- False OR False = False +- False OR True = True +- True OR False = True +- True OR True = True +``` +The numeric values for True and False are defined with: +``` +const + cs_false = 0.1; // Encoding for "false" value + cs_true = 0.8; // Encoding for "true" value + cs_threshold = (cs_false + cs_true) / 2; // Threshold for false/true (neuronal activation) +``` + +Then, the actual data structures with input and output of the OR operation are defined with: + +``` +type + // Define the input and output types for training data + TBackInput = array[0..3] of array[0..1] of TNeuralFloat; // Input data for OR operation + TBackOutput = array[0..3] of array[0..0] of TNeuralFloat; // Expected output for OR operation + +const + cs_inputs : TBackInput = + ( + // Input data for OR operation + (cs_false, cs_false), + (cs_false, cs_true), + (cs_true, cs_false), + (cs_true, cs_true) + ); + +const + cs_outputs : TBackOutput = + ( + // Expected outputs for OR operation + (cs_false), + (cs_true), + (cs_true), + (cs_true) + ); +``` + +### Neural Network Architecture + +The neural network consists of only 2 layers: +* an input layer with two inputs (representing the two inputs of the OR operation) +* and a single output neuron that provides the result. It uses a fully connected architecture without activation function. + +The above neural network is created with: +``` + NN := TNNet.Create(); + // Create the neural network layers + NN.AddLayer(TNNetInput.Create(2)); // Input layer with 2 inputs + NN.AddLayer(TNNetFullConnectLinear.Create(1)); // Single neuron layer connected to both inputs from the previous layer. +``` + +The activation function is not required when the problem to be learned can be solved via linear algebra. 
+ +### Training the Neural Network +The training is done with: +``` + vInputs := cs_inputs; // Assign the input data + vOutput := cs_outputs; // Assign the expected output data + pOutPut := TNNetVolume.Create(1, 1, 1, 1); // Create a volume to hold the computed output + NN.SetLearningRate(0.01, 0.9); // Set the learning rate and momentum + + for EpochCnt := 1 to 600 do + begin + for Cnt := Low(cs_inputs) to High(cs_inputs) do + begin + // Feed forward and backpropagation + NN.Compute(vInputs[Cnt]); // Perform feedforward computation + NN.GetOutput(pOutPut); // Get the output of the network + NN.Backpropagate(vOutput[Cnt]); // Perform backpropagation to adjust weights + + if EpochCnt mod 100 = 0 then + WriteLn( + EpochCnt:7, 'x', Cnt, + ' Inputs:', vInputs[Cnt][0]:5:3,' ',vInputs[Cnt][1]:5:3, + ' Computed Output:', pOutPut.Raw[0]:5:2,' ', + ' Desired Output:', vOutput[cnt][0]:5:2 + ); + end; + + if EpochCnt mod 100 = 0 then + WriteLn(); + end; +``` +After running the above code, the output is: +``` +The value encoding FALSE is: 0.10 +The value encoding TRUE is: 0.80 +The threshold is: 0.45 + + 600x0 Inputs:0.100 0.100 Computed Output: 0.27 Desired Output: 0.10 + 600x1 Inputs:0.100 0.800 Computed Output: 0.62 Desired Output: 0.80 + 600x2 Inputs:0.800 0.100 Computed Output: 0.63 Desired Output: 0.80 + 600x3 Inputs:0.800 0.800 Computed Output: 0.98 Desired Output: 0.80 +``` +All values above 0.45 are considered the boolean value `True` while all values below 0.45 are considered `False`. We got the result 100% right. 
diff --git a/examples/ResNet/CaiResNet20.lpr b/examples/ResNet/CaiResNet20.lpr index 7f9313b0..4d0db767 100644 --- a/examples/ResNet/CaiResNet20.lpr +++ b/examples/ResNet/CaiResNet20.lpr @@ -109,7 +109,7 @@ procedure CaiOptimizedResnetUnit(pNN: TNNet; pNeurons: integer); NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0.0; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}32, {epochs=}50); NeuralFit.Free; ReadLn(); diff --git a/examples/ResNet/ResNet20.lpr b/examples/ResNet/ResNet20.lpr index 32eafc88..2e898ec0 100644 --- a/examples/ResNet/ResNet20.lpr +++ b/examples/ResNet/ResNet20.lpr @@ -80,7 +80,7 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0.0; //NeuralFit.MaxThreadNum := 1; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}32, {epochs=}50); NeuralFit.Free; diff --git a/examples/SelfTest/SelfTest.dpr b/examples/SelfTest/SelfTest.dpr index 1938c065..4ced9544 100644 --- a/examples/SelfTest/SelfTest.dpr +++ b/examples/SelfTest/SelfTest.dpr @@ -6,35 +6,37 @@ uses Classes, SysUtils, Math, - CPUFeatures in '..\..\neural\CPUFeatures.pas', - Neural.AVX in '..\..\neural\Neural.AVX.pas', - Neural.AVXx64 in '..\..\neural\Neural.AVXx64.pas', - neuralab in '..\..\neural\neuralab.pas', - neuralabfun in '..\..\neural\neuralabfun.pas', - neuralbit in '..\..\neural\neuralbit.pas', - neuralbyteprediction in '..\..\neural\neuralbyteprediction.pas', - neuralcache in '..\..\neural\neuralcache.pas', - neuraldatasets in '..\..\neural\neuraldatasets.pas', - neuraldatasetsv in '..\..\neural\neuraldatasetsv.pas', - neuralevolutionary in '..\..\neural\neuralevolutionary.pas', - neuralfit in '..\..\neural\neuralfit.pas', - 
neuralgeneric in '..\..\neural\neuralgeneric.pas', - neuralnetwork in '..\..\neural\neuralnetwork.pas', - neuralopencl in '..\..\neural\neuralopencl.pas', - neuralopenclv in '..\..\neural\neuralopenclv.pas', - neuralplanbuilder in '..\..\neural\neuralplanbuilder.pas', - neuralthread in '..\..\neural\neuralthread.pas', - neuralvolume in '..\..\neural\neuralvolume.pas', - neuralvolumev in '..\..\neural\neuralvolumev.pas'; + NeuralAVX in '..\..\Neural\NeuralAVX.pas', + NeuralAVXx64 in '..\..\Neural\NeuralAVXx64.pas', + neuralab in '..\..\Neural\neuralab.pas', + neuralabfun in '..\..\Neural\neuralabfun.pas', + neuralbit in '..\..\Neural\neuralbit.pas', + neuralbyteprediction in '..\..\Neural\neuralbyteprediction.pas', + neuralcache in '..\..\Neural\neuralcache.pas', + neuraldatasets in '..\..\Neural\neuraldatasets.pas', + neuraldatasetsv in '..\..\Neural\neuraldatasetsv.pas', + neuralevolutionary in '..\..\Neural\neuralevolutionary.pas', + neuralfit in '..\..\Neural\neuralfit.pas', + neuralgeneric in '..\..\Neural\neuralgeneric.pas', + neuralnetwork in '..\..\Neural\neuralnetwork.pas', + neuralopencl in '..\..\Neural\neuralopencl.pas', + neuralopenclv in '..\..\Neural\neuralopenclv.pas', + neuralplanbuilder in '..\..\Neural\neuralplanbuilder.pas', + neuralthread in '..\..\Neural\neuralthread.pas', + neuralvolume in '..\..\Neural\neuralvolume.pas', + neuralvolumev in '..\..\Neural\neuralvolumev.pas'; begin - WriteLn('Testing Volumes API ...'); - TestTNNetVolume(); - TestKMeans(); + Writeln('Test AVX'); + TestAVX; - WriteLn('Testing Convolutional API ...'); - TestConvolutionAPI; + WriteLn('Testing Volumes API ...'); + TestTNNetVolume(); + TestKMeans(); - WriteLn('Press ENTER to quit.'); - ReadLn; + WriteLn('Testing Convolutional API ...'); + TestConvolutionAPI(); + + WriteLn('Press ENTER to quit.'); + ReadLn; end. 
diff --git a/examples/SimpleFashionMNIST/SimpleFashionMNIST.lpr b/examples/SimpleFashionMNIST/SimpleFashionMNIST.lpr index d82aeaf4..e7a95929 100644 --- a/examples/SimpleFashionMNIST/SimpleFashionMNIST.lpr +++ b/examples/SimpleFashionMNIST/SimpleFashionMNIST.lpr @@ -2,6 +2,12 @@ (* Coded by Joao Paulo Schwarz Schuler. https://github.com/joaopauloschuler/neural-api + ----------------------------------------------- + The code shows an example of training and fitting a convolutional + neural network (CNN) using the Fashion MNIST dataset. It creates a neural + network with specific layers and configurations, loads the fashion MNIST data, + and then trains the network using the provided data. The code also sets + various parameters for training, such as learning rate, decay, and batch size. *) {$mode objfpc}{$H+} @@ -15,34 +21,42 @@ TTestCNNAlgo = class(TCustomApplication) procedure DoRun; override; end; + // Implementation of the TTestCNNAlgo class procedure TTestCNNAlgo.DoRun; var - NN: TNNet; - NeuralFit: TNeuralImageFit; - ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes: TNNetVolumeList; + NN: TNNet; // Neural network object + NeuralFit: TNeuralImageFit; // Object for training and fitting the neural network + ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes: TNNetVolumeList; // Lists of training, validation, and test image volumes begin + // Checking if the MNIST files exist and loading the data if Not(CheckMNISTFile('train', {IsFashion=}true)) or Not(CheckMNISTFile('t10k', {IsFashion=}true)) then begin Terminate; exit; end; + WriteLn('Creating Neural Network...'); - NN := THistoricalNets.Create(); + + // Creating the neural network with specific layers and configurations + NN := TNNet.Create(); NN.AddLayer([ - TNNetInput.Create(28, 28, 1), - TNNetConvolutionLinear.Create(64, 5, 2, 1, 1), - TNNetMaxPool.Create(4), - TNNetConvolutionReLU.Create(64, 3, 1, 1, 1), - TNNetConvolutionReLU.Create(64, 3, 1, 1, 1), - TNNetFullConnectReLU.Create(32), - 
TNNetFullConnectReLU.Create(32), - TNNetFullConnectLinear.Create(10), - TNNetSoftMax.Create() + TNNetInput.Create(28, 28, 1), // Input layer for 28x28 grayscale images + TNNetConvolutionLinear.Create(64, 5, 2, 1, 1), // Convolutional layer with linear activation + TNNetMaxPool.Create(4), // Max pooling layer + TNNetConvolutionReLU.Create(64, 3, 1, 1, 1), // Convolutional layer with ReLU activation + TNNetConvolutionReLU.Create(64, 3, 1, 1, 1), // Convolutional layer with ReLU activation + TNNetFullConnectReLU.Create(32), // Fully connected layer with ReLU activation + TNNetFullConnectReLU.Create(32), // Fully connected layer with ReLU activation + TNNetFullConnectLinear.Create(10), // Fully connected layer with linear activation + TNNetSoftMax.Create() // Softmax layer for classification ]); + + // Creating the training, validation, and test image volumes from the fashion MNIST files CreateMNISTVolumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, 'train', 't10k', {Verbose=}true, {IsFashion=}true); + // Creating and configuring the NeuralFit object for training the neural network NeuralFit := TNeuralImageFit.Create; NeuralFit.FileNameBase := 'SimpleFashionMNIST'; NeuralFit.InitialLearningRate := 0.001; @@ -53,21 +67,25 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.HasFlipX := true; NeuralFit.HasFlipY := false; NeuralFit.MaxCropSize := 4; + + // Training and fitting the neural network using the provided data NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}128, {epochs=}50); - NeuralFit.Free; - NN.Free; - ImgTestVolumes.Free; - ImgValidationVolumes.Free; - ImgTrainingVolumes.Free; + NeuralFit.Free; // Freeing the NeuralFit object + + NN.Free; // Freeing the neural network object + ImgTestVolumes.Free; // Freeing the test data volumes + ImgValidationVolumes.Free; // Freeing the validation data volumes + ImgTrainingVolumes.Free; // Freeing the training data volumes Terminate; end; var 
Application: TTestCNNAlgo; begin - Application := TTestCNNAlgo.Create(nil); - Application.Title:='Simple Fashion MNIST Classification Example'; - Application.Run; - Application.Free; + Application := TTestCNNAlgo.Create(nil); // Creating an instance of the TTestCNNAlgo class + Application.Title:='Simple Fashion MNIST Classification Example'; // Setting the application title + Application.Run; // Running the application + Application.Free; // Freeing the application instance end. + diff --git a/examples/SimpleImageClassifier/README.md b/examples/SimpleImageClassifier/README.md index 0bce7044..69e4895e 100644 --- a/examples/SimpleImageClassifier/README.md +++ b/examples/SimpleImageClassifier/README.md @@ -29,22 +29,80 @@ Later on, this is how the training/fitting is called: NeuralFit := TNeuralImageFit.Create; NeuralFit.FileNameBase := 'SimpleImageClassifier'; NeuralFit.InitialLearningRate := 0.001; - NeuralFit.LearningRateDecay := 0.005; - NeuralFit.StaircaseEpochs := 17; + NeuralFit.LearningRateDecay := 0.01; + NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; - NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}128, {epochs=}100); + NeuralFit.L2Decay := 0; + NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); ``` -There is a trick that you can do with this API or any other API when working with image classification: **you can increase the input image size**. +## Beyond ReLU Activation Function -As per the following example, by increasing CIFAR-10 input image sizes from 32x32 to 48x48, you can gain up to 2% in classification accuracy. +The paper [Searching for Activation Functions](https://arxiv.org/abs/1710.05941) describes the search for a better activation function than **ReLU**. In their work, the authors found **Swish** to be the best replacement for **ReLU**. 
The downside of Swish is: it requires a lot of computation to calculate it. Later, the paper [Searching for MobileNetV3](https://arxiv.org/pdf/1905.02244v5.pdf) introduces the **Hard Swish** activation function. The **Hard Swish** gives similar results to **Swish** with a lot less computation. -You can change image sizes with: +The same neural network shown above could be implemented with **Swish** as ``` -ImgTrainingVolumes.ResizeImage(48, 48); -ImgValidationVolumes.ResizeImage(48, 48); -ImgTestVolumes.ResizeImage(48, 48); + NN.AddLayer([ + TNNetInput.Create(32, 32, 3), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}5, {Padding=}2, {Stride=}1, {SuppressBias=}1), + TNNetMaxPool.Create(4), + TNNetMovingStdNormalization.Create(), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetSwish.Create(), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetSwish.Create(), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetSwish.Create(), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetSwish.Create(), + TNNetDropout.Create(0.5), + TNNetMaxPool.Create(2), + TNNetFullConnectLinear.Create(10), + TNNetSoftMax.Create() + ]); +``` + +or as +``` + NN.AddLayer([ + TNNetInput.Create(32, 32, 3), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}5, {Padding=}2, {Stride=}1, {SuppressBias=}1), + TNNetMaxPool.Create(4), + TNNetMovingStdNormalization.Create(), + TNNetConvolutionSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetConvolutionSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetConvolutionSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + 
TNNetConvolutionSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetDropout.Create(0.5), + TNNetMaxPool.Create(2), + TNNetFullConnectLinear.Create(10), + TNNetSoftMax.Create() + ]); +``` + +The Hard Swish variant is implemented with: +``` + NN.AddLayer([ + TNNetInput.Create(32, 32, 3), + TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}5, {Padding=}2, {Stride=}1, {SuppressBias=}1), + TNNetMaxPool.Create(4), + TNNetMovingStdNormalization.Create(), + TNNetConvolutionHardSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetConvolutionHardSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetConvolutionHardSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetConvolutionHardSwish.Create({Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1), + TNNetDropout.Create(0.5), + TNNetMaxPool.Create(2), + TNNetFullConnectLinear.Create(10), + TNNetSoftMax.Create() + ]); ``` -You can find an implementation with this trick at the [SimpleImageClassifierResize48.lpr](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleImageClassifier/SimpleImageClassifierResize48.lpr) file. There is also another implementation resizing to CIFAR-10 to 64x64 pixels but the gain won't be too big. 
+ +These are the CIFAR-10 classification accuracies with ReLU, Swish and HardSwish activation functions: + +Activation Function (source) | Test Classification Accuracy (%) +---------------------------- | ------------------------------------- +[ReLU](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleImageClassifier/SimpleImageClassifier.lpr) | [85.53%](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleImageClassifier/results/SimpleImageClassifier20221206.csv) +[Swish](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleImageClassifier/SimpleImageClassifierSwish.lpr) | [86.55%](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleImageClassifier/results/SimpleImageClassifierSwish20221207.csv) +[Hard Swish](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleImageClassifier/SimpleImageClassifierHardSwish.lpr) | [86.82%](https://github.com/joaopauloschuler/neural-api/blob/master/examples/SimpleImageClassifier/results/SimpleImageClassifierHardSwish20221208.csv) diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierGroupedConv.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierGroupedConv.lpr index 98e6e9ad..d47403b4 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifierGroupedConv.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifierGroupedConv.lpr @@ -34,13 +34,13 @@ TTestCNNAlgo = class(TCustomApplication) TNNetConvolutionLinear.Create({Features=}64, {FeatureSize=}5, {Padding=}2, {Stride=}1, {SuppressBias=}1), TNNetMaxPool.Create(4) ]); - NN.AddGroupedConvolution(TNNetConvolutionReLU, - {Groups=}8, {Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1); - NN.AddGroupedConvolution(TNNetConvolutionReLU, + NN.AddGroupedConvolution(TNNetConvolutionHardSwish, {Groups=}4, {Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1); - NN.AddGroupedConvolution(TNNetConvolutionReLU, - 
{Groups=}8, {Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1); - NN.AddGroupedConvolution(TNNetConvolutionReLU, + NN.AddGroupedConvolution(TNNetConvolutionHardSwish, + {Groups=}4, {Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1); + NN.AddGroupedConvolution(TNNetConvolutionHardSwish, + {Groups=}4, {Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1); + NN.AddGroupedConvolution(TNNetConvolutionHardSwish, {Groups=}4, {Features=}64, {FeatureSize=}3, {Padding=}1, {Stride=}1, {SuppressBias=}1); NN.AddLayer([ TNNetMaxPool.Create(2), @@ -52,12 +52,12 @@ TTestCNNAlgo = class(TCustomApplication) CreateCifar10Volumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes); NeuralFit := TNeuralImageFit.Create; - NeuralFit.FileNameBase := 'SimpleImageClassifier-'+IntToStr(GetProcessId()); + NeuralFit.FileNameBase := 'SimpleImageClassifierGroupedConv-'+IntToStr(GetProcessId()); NeuralFit.InitialLearningRate := 0.001; NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0; //NeuralFit.MaxThreadNum := 1; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); NeuralFit.Free; diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCropping.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCropping.lpr index 2a51a64d..63fdf802 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCropping.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCropping.lpr @@ -79,7 +79,7 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); 
NeuralFit.Free; diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCroppingSwish.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCroppingSwish.lpr index 554350f2..578242bb 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCroppingSwish.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifierPaddingCroppingSwish.lpr @@ -83,7 +83,7 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); NeuralFit.Free; diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierReLU6.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierReLU6.lpr index d809b8af..a3d994b4 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifierReLU6.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifierReLU6.lpr @@ -56,7 +56,7 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); NeuralFit.Free; diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierResize48.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierResize48.lpr index e6bd6809..2f22770f 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifierResize48.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifierResize48.lpr @@ -54,7 +54,7 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0; NeuralFit.MaxCropSize := 12; //NeuralFit.MaxThreadNum := 8; 
NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierResize64.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierResize64.lpr index e0ef256e..11c80681 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifierResize64.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifierResize64.lpr @@ -54,7 +54,7 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0; NeuralFit.MaxCropSize := 16; //NeuralFit.MaxThreadNum := 8; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); diff --git a/examples/SimpleImageClassifier/SimpleImageClassifierSwish.lpr b/examples/SimpleImageClassifier/SimpleImageClassifierSwish.lpr index bd72f63e..34d1aa8d 100644 --- a/examples/SimpleImageClassifier/SimpleImageClassifierSwish.lpr +++ b/examples/SimpleImageClassifier/SimpleImageClassifierSwish.lpr @@ -56,7 +56,7 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.LearningRateDecay := 0.01; NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; - NeuralFit.L2Decay := 0.00001; + NeuralFit.L2Decay := 0; NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}64, {epochs=}50); NeuralFit.Free; diff --git a/examples/SimpleMNist/SimpleMNist.lpr b/examples/SimpleMNist/SimpleMNist.lpr index 1ea56a11..e780af12 100644 --- a/examples/SimpleMNist/SimpleMNist.lpr +++ b/examples/SimpleMNist/SimpleMNist.lpr @@ -7,7 +7,8 @@ uses {$IFDEF UNIX} {$IFDEF UseCThreads} cthreads, {$ENDIF} {$ENDIF} - Classes, SysUtils, CustApp, neuralnetwork, neuralvolume, Math, neuraldatasets, neuralfit; + Classes, SysUtils, CustApp, neuralnetwork, neuralvolume, Math, + neuraldatasets, neuralfit; type TTestCNNAlgo = 
class(TCustomApplication) @@ -17,17 +18,21 @@ TTestCNNAlgo = class(TCustomApplication) procedure TTestCNNAlgo.DoRun; var - NN: THistoricalNets; - NeuralFit: TNeuralImageFit; - ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes: TNNetVolumeList; + NN: TNNet; // Neural network object + NeuralFit: TNeuralImageFit; // Object for neural network fitting + ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes: TNNetVolumeList; // Volumes for training, validation, and testing begin - if Not(CheckMNISTFile('train')) or Not(CheckMNISTFile('t10k')) then + // Check if MNIST files exist + if not (CheckMNISTFile('train')) or not (CheckMNISTFile('t10k')) then begin Terminate; - exit; + Exit; // Exit the procedure if MNIST files are not found end; + WriteLn('Creating Neural Network...'); - NN := THistoricalNets.Create(); + NN := TNNet.Create(); // Create an instance of the neural network + + // Define the layers of the neural network NN.AddLayer([ TNNetInput.Create(28, 28, 1), TNNetConvolutionLinear.Create(32, 5, 2, 1, 1), @@ -41,8 +46,11 @@ TTestCNNAlgo = class(TCustomApplication) TNNetFullConnectLinear.Create(10), TNNetSoftMax.Create() ]); + + // Create MNIST volumes for training, validation, and testing CreateMNISTVolumes(ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, 'train', 't10k'); + // Configure the neural network fitting NeuralFit := TNeuralImageFit.Create; NeuralFit.FileNameBase := 'SimpleMNist'; NeuralFit.InitialLearningRate := 0.001; @@ -50,12 +58,15 @@ TTestCNNAlgo = class(TCustomApplication) NeuralFit.StaircaseEpochs := 10; NeuralFit.Inertia := 0.9; NeuralFit.L2Decay := 0.00001; - NeuralFit.HasFlipX := false; - NeuralFit.HasFlipY := false; + NeuralFit.HasFlipX := False; + NeuralFit.HasFlipY := False; NeuralFit.MaxCropSize := 4; + + // Fit the neural network using the training, validation, and testing volumes NeuralFit.Fit(NN, ImgTrainingVolumes, ImgValidationVolumes, ImgTestVolumes, {NumClasses=}10, {batchsize=}128, {epochs=}20); NeuralFit.Free; + 
// Clean up resources NN.Free; ImgTestVolumes.Free; ImgValidationVolumes.Free; @@ -66,8 +77,8 @@ TTestCNNAlgo = class(TCustomApplication) var Application: TTestCNNAlgo; begin - Application := TTestCNNAlgo.Create(nil); - Application.Title:='MNist Classification Example'; - Application.Run; - Application.Free; + Application := TTestCNNAlgo.Create(nil); // Create an instance of TTestCNNAlgo + Application.Title := 'MNist Classification Example'; // Set the application title + Application.Run; // Run the application + Application.Free; // Free the application instance end. diff --git a/examples/SimpleNLP/NLP_CAI_TinyStories_Simple_Example.ipynb b/examples/SimpleNLP/NLP_CAI_TinyStories_Simple_Example.ipynb new file mode 100644 index 00000000..7fd7c39d --- /dev/null +++ b/examples/SimpleNLP/NLP_CAI_TinyStories_Simple_Example.ipynb @@ -0,0 +1,6066 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "VwjiaCJVRG_b" + }, + "outputs": [], + "source": [ + "!apt-get -y install fpc fpc-source lazarus git subversion" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "eODNMehURJrs" + }, + "outputs": [], + "source": [ + "!svn checkout https://svn.code.sf.net/p/cai/svncode/trunk/lazarus neural-api" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "JEb0v3dYRKLL" + }, + "outputs": [], + "source": [ + "!svn checkout https://svn.code.sf.net/p/lazarus-ccr/svn/components/multithreadprocs mtprocs" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "5ynmsJXeRMkY" + }, + "outputs": [], + "source": [ + "!lazbuild mtprocs/multithreadprocslaz.lpk" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "EHmsaJRulzjN" + }, + "outputs": [], + "source": [ + "!pip install datasets" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "X_OQWnXWluGR" + }, + "outputs": [], + "source": 
[ + "import os\n", + "import urllib.request\n", + "from datasets import load_dataset" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "Q9YcTcqJmCFm" + }, + "outputs": [], + "source": [ + "wikids = load_dataset(\"roneneldan/TinyStories\", split=\"train\")" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/", + "height": 108 + }, + "id": "1Me6q1__wT9q", + "outputId": "08974105-4da8-4960-8bd5-767283ab7209" + }, + "outputs": [ + { + "data": { + "application/vnd.google.colaboratory.intrinsic+json": { + "type": "string" + }, + "text/plain": [ + "'Once upon a time, there was a little car named Beep. Beep loved to go fast and play in the sun. Beep was a healthy car because he always had good fuel. Good fuel made Beep happy and strong.\\n\\nOne day, Beep was driving in the park when he saw a big tree. The tree had many leaves that were falling. Beep liked how the leaves fall and wanted to play with them. Beep drove under the tree and watched the leaves fall on him. He laughed and beeped his horn.\\n\\nBeep played with the falling leaves all day. When it was time to go home, Beep knew he needed more fuel. He went to the fuel place and got more healthy fuel. Now, Beep was ready to go fast and play again the next day. 
And Beep lived happily ever after.'" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "wikids[1]['text']" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "G7eFszt0RO-3", + "outputId": "814eea6d-7898-42c1-fb73-07269fda3298" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "99983 loaded rows.\n", + "199955 loaded rows.\n", + "299954 loaded rows.\n", + "399946 loaded rows.\n", + "499945 loaded rows.\n", + "599945 loaded rows.\n", + "699935 loaded rows.\n", + "799935 loaded rows.\n", + "899934 loaded rows.\n", + "999929 loaded rows.\n", + "1099908 loaded rows.\n", + "1199892 loaded rows.\n", + "1299892 loaded rows.\n", + "1399870 loaded rows.\n", + "1499833 loaded rows.\n", + "1599832 loaded rows.\n", + "1699819 loaded rows.\n", + "1799819 loaded rows.\n", + "1899818 loaded rows.\n", + "1999789 loaded rows.\n", + "2099789 loaded rows.\n", + "We have 2119489 strings in the dataset out of a total of 2119719 .\n" + ] + } + ], + "source": [ + "MIN_TRAINING_SEQ_LEN = 20\n", + "prepared_ds = []\n", + "row_cnt = 0\n", + "# for ds_row in wikids:\n", + "for ds_row in wikids:\n", + " row_cnt = row_cnt + 1\n", + " new_row = ds_row['text'].strip(\" '\\\"\")\n", + " new_row = new_row.replace(' .', '.').replace(' ,', ',').replace(' !', '!').replace(' ?', '?').replace(' ;', ';').replace(' :', ':').replace(\" '\", \"'\")\n", + " new_row = new_row.replace('<unk>', '').replace(' ', ' ')\n", + " # remove non ascii characters from new_row\n", + " new_row = ''.join([i if (ord(i) < 128) and (ord(i) > 31) else '' for i in new_row])\n", + " # remove any linefeed\n", + " new_row = new_row.replace('\\n', '')\n", + " new_row_len = len(new_row)\n", + " if ( new_row_len > MIN_TRAINING_SEQ_LEN ):\n", + " prepared_ds.append(new_row)\n", + " # if row_cnt > 100000: break\n", + " if row_cnt % 100000 == 0:\n", + 
" print(len(prepared_ds), \"loaded rows.\")\n", + "print(\"We have\", len(prepared_ds), \"strings in the dataset out of a total of\", row_cnt,'.')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "pQhV30NLr6CO" + }, + "outputs": [], + "source": [ + "small_ds = prepared_ds[:600]\n", + "medium_ds = prepared_ds[:60000]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "f_E87fjAy9HM", + "outputId": "558673e8-7c1b-4a5d-84f5-679269a88348" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "rm: cannot remove '*.txt': No such file or directory\n" + ] + } + ], + "source": [ + "!rm *.txt" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "rDhCwEWc5qfv" + }, + "outputs": [], + "source": [ + "def save_dataset(dataset, filename):\n", + " with open(filename, 'w') as f:\n", + " for item in dataset:\n", + " f.write(\"%s\\n\" % item)\n", + "\n", + "save_dataset(small_ds, 'tinystories_small.txt')\n", + "save_dataset(medium_ds, 'tinystories_medium_ds.txt')\n", + "save_dataset(prepared_ds,'tinystories.txt')" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "pFMNs9Si6a3g", + "outputId": "f0dfe860-e456-4216-8912-a54f734f5d1d" + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " adding: tinystories_small.txt (deflated 68%)\n", + " adding: tinystories_medium_ds.txt (deflated 70%)\n", + " adding: tinystories.txt (deflated 69%)\n" + ] + } + ], + "source": [ + "!zip -r tinystories_small.zip tinystories_small.txt\n", + "!zip -r tinystories_medium_ds.zip tinystories_medium_ds.txt\n", + "!zip -r tinystories.zip tinystories.txt" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "H8bjA5jERPWD" + }, + "outputs": [], + "source": [ + 
"code = \"\"\"\n", + "program SimpleNLP;\n", + "(*\n", + "Copyright (C) 2023 Joao Paulo Schwarz Schuler\n", + "\n", + "This program is free software; you can redistribute it and/or modify\n", + "it under the terms of the GNU General Public License as published by\n", + "the Free Software Foundation; either version 2 of the License, or\n", + "any later version.\n", + "\n", + "This program is distributed in the hope that it will be useful,\n", + "but WITHOUT ANY WARRANTY; without even the implied warranty of\n", + "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n", + "GNU General Public License for more details.\n", + "\n", + "You should have received a copy of the GNU General Public License along\n", + "with this program; if not, write to the Free Software Foundation, Inc.,\n", + "51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.\n", + "*)\n", + "\n", + "{$mode objfpc}{$H+}\n", + "\n", + "uses {$IFDEF UNIX} {$IFDEF UseCThreads}\n", + " cthreads, {$ENDIF} {$ENDIF}\n", + " Classes,\n", + " neuralnetwork,\n", + " neuralvolume,\n", + " neuralfit,\n", + " neuralthread,\n", + " CustApp,\n", + " Math;\n", + "\n", + "const\n", + " csContextLen = 81;\n", + " csTrainingFileName = 'tinystories.txt';\n", + " csVocabSize = 128; // Character based vocabulary/dictionary.\n", + " csMinSampleSize = 3; // Minimum of 3 characters.\n", + "\n", + "type\n", + "\n", + " { TTestFitLoading }\n", + "\n", + " TTestFitLoading = class(TCustomApplication)\n", + " protected\n", + " FDataset: TStringList;\n", + " FDatasetSize: integer;\n", + " FNN: TNNet;\n", + " NFit: TNeuralDataLoadingFit;\n", + " FSampler: TNNetSamplerBase;\n", + " FMaxPredictCharPos: integer;\n", + " procedure LoadDataset;\n", + " procedure DoRun; override;\n", + " public\n", + " procedure OnAfterEpoch(Sender: TObject);\n", + " procedure GetTrainingPair(Idx: integer; ThreadId: integer; pInput, pOutput: TNNetVolume);\n", + " procedure GetValidationPair(Idx: integer; ThreadId: integer; pInput, pOutput: 
TNNetVolume);\n", + " procedure GetTestPair(Idx: integer; ThreadId: integer; pInput, pOutput: TNNetVolume);\n", + " end;\n", + "\n", + " procedure TTestFitLoading.LoadDataset;\n", + " var\n", + " RowCnt: integer;\n", + " begin\n", + " FDataset.LoadFromFile(csTrainingFileName);\n", + " FDatasetSize := FDataset.Count;\n", + " for RowCnt := FDatasetSize-1 downto 0 do\n", + " begin\n", + " // removes too short strings\n", + " if Length(FDataset[RowCnt])<csMinSampleSize then FDataset.Delete(RowCnt);\n", + " end;\n", + " FDatasetSize := FDataset.Count;\n", + " for RowCnt := FDatasetSize-1 downto 0 do\n", + " begin\n", + " // removes too short strings\n", + " FDataset[RowCnt] := LowerCase(FDataset[RowCnt]) + chr(1);\n", + " end;\n", + " WriteLn('Loaded dataset with ', FDatasetSize, ' rows');\n", + " end;\n", + "\n", + " procedure TTestFitLoading.DoRun;\n", + " begin\n", + " FDataset := TStringList.Create();\n", + " LoadDataset();\n", + " FNN := TNNet.Create();\n", + " NFit := TNeuralDataLoadingFit.Create();\n", + " FMaxPredictCharPos := csMinSampleSize;\n", + " FSampler := TNNetSamplerTopP.Create(0.4);\n", + " FNN.AddLayer([\n", + " TNNetInput.Create(csContextLen, 1, csVocabSize),\n", + " TNNetPointwiseConv.Create(32,1),\n", + " TNNetPadXY.Create(1,0),\n", + " TNNetConvolutionReLU.Create(64,3,0,1,1),\n", + " TNNetMaxPool.Create(3),\n", + " TNNetPadXY.Create(1,0),\n", + " TNNetConvolutionReLU.Create(128*3,3,0,1,1),\n", + " TNNetPointwiseConvReLU.Create(1024,0),\n", + " TNNetMaxPoolWithPosition.Create(27,27,0,1,0),\n", + " TNNetPointwiseConvReLU.Create(1024),\n", + " TNNetPointwiseConvReLU.Create(128),\n", + " TNNetFullConnectLinear.Create(csVocabSize),\n", + " TNNetSoftMax.Create()\n", + " ]);\n", + " DebugThreadCount();\n", + " FNN.DebugStructure;\n", + "\n", + " WriteLn('Computing...');\n", + " NFit.MaxThreadNum := 32;\n", + " NFit.LogEveryBatches := 100;\n", + " NFit.InitialLearningRate := 0.0001;\n", + " NFit.LearningRateDecay := 0;\n", + " NFit.L2Decay := 0;\n", + " 
NFit.EnableClassComparison();\n", + " NFit.EnableDefaultLoss();\n", + " NFit.AvgWeightEpochCount := 1;\n", + " NFit.OnAfterEpoch := @OnAfterEpoch;\n", + " NFit.FitLoading(\n", + " FNN,\n", + " {TrainingVolumesCount=}32000*3,\n", + " {ValidationVolumesCount=}32000*3 div 20,\n", + " {TestVolumesCount=}32000*3 div 20,\n", + " {batchsize=}320,\n", + " {epochs=}500,\n", + " @GetTrainingPair, @GetValidationPair, @GetTestPair\n", + " );\n", + " FNN.DebugWeights();\n", + " OnAfterEpoch(Self);\n", + " FSampler.Free;\n", + " NFit.Free;\n", + " FNN.Free;\n", + " FDataset.Free;\n", + " Terminate;\n", + " end;\n", + "\n", + " procedure TTestFitLoading.OnAfterEpoch(Sender: TObject);\n", + " begin\n", + " WriteLn('Testing.');\n", + " WriteLn(GenerateStringFromChars(NFit.NN, 'once', FSampler),'.');\n", + " WriteLn(GenerateStringFromChars(NFit.NN, 'one ', FSampler),'.');\n", + " WriteLn(GenerateStringFromChars(NFit.NN, 'once upon ', FSampler),'.');\n", + " if NFit.TrainingAccuracy < 0.5\n", + " then FMaxPredictCharPos := Max(FMaxPredictCharPos-1, csMinSampleSize)\n", + " else FMaxPredictCharPos := Min(FMaxPredictCharPos+1, csContextLen);\n", + " WriteLn('Max prediction pos is: ', FMaxPredictCharPos);\n", + " end;\n", + "\n", + " procedure TTestFitLoading.GetTrainingPair(Idx: integer; ThreadId: integer;\n", + " pInput, pOutput: TNNetVolume);\n", + " var\n", + " SampleId: integer;\n", + " SampleLen: integer;\n", + " SampleCutPosition: integer;\n", + " ExpectedTokenChar: char;\n", + " ExpectedTokenInt: integer;\n", + " begin\n", + " // Make sure that expected input and output have the proper sizes.\n", + " if FNN.GetFirstLayer().Output.Size <> pInput.Size then pInput.ReSize(FNN.GetFirstLayer().Output);\n", + " if FNN.GetLastLayer().Output.Size <> pOutput.Size then pOutput.ReSize(FNN.GetLastLayer().Output);\n", + " // Get the input sample\n", + " SampleId := Random(FDatasetSize);\n", + " SampleLen := Min(Length(FDataset[SampleId]), pInput.SizeX);\n", + " SampleLen := 
Min(FMaxPredictCharPos, SampleLen);\n", + " SampleCutPosition := Random(SampleLen-csMinSampleSize)+csMinSampleSize; // -1\n", + " // The expected token is the next character in the string\n", + " ExpectedTokenChar := FDataset[SampleId][SampleCutPosition+1];\n", + " ExpectedTokenInt := Min(Ord(ExpectedTokenChar),pInput.Depth-1);\n", + " // Encode the input and output volumes\n", + " pInput.OneHotEncodingReversed(copy(FDataset[SampleId], 1, SampleCutPosition));\n", + " pOutput.SetClassForSoftMax(ExpectedTokenInt);\n", + " pOutput.Tag := ExpectedTokenInt;\n", + " end;\n", + "\n", + " procedure TTestFitLoading.GetValidationPair(Idx: integer; ThreadId: integer;\n", + " pInput, pOutput: TNNetVolume);\n", + " var\n", + " SampleId: integer;\n", + " SampleLen: integer;\n", + " SampleCutPosition: integer;\n", + " ExpectedTokenChar: char;\n", + " ExpectedTokenInt: integer;\n", + " begin\n", + " // Make sure that expected input and output have the proper sizes.\n", + " if FNN.GetFirstLayer().Output.Size <> pInput.Size then pInput.ReSize(FNN.GetFirstLayer().Output);\n", + " if FNN.GetLastLayer().Output.Size <> pOutput.Size then pOutput.ReSize(FNN.GetLastLayer().Output);\n", + " // Get the input sample\n", + " SampleId := Idx;\n", + " SampleLen := Min(Length(FDataset[SampleId]), pInput.SizeX);\n", + " SampleCutPosition := (Idx mod (1+SampleLen-csMinSampleSize))+csMinSampleSize-1;\n", + " // The expected token is the next character in the string\n", + " ExpectedTokenChar := FDataset[SampleId][SampleCutPosition+1];\n", + " ExpectedTokenInt := Min(Ord(ExpectedTokenChar),pInput.Depth-1);\n", + " // Encode the input and output volumes\n", + " pInput.OneHotEncodingReversed(copy(FDataset[SampleId], 1, SampleCutPosition));\n", + " pOutput.SetClassForSoftMax(ExpectedTokenInt);\n", + " pOutput.Tag := ExpectedTokenInt;\n", + " end;\n", + "\n", + " procedure TTestFitLoading.GetTestPair(Idx: integer; ThreadId: integer;\n", + " pInput, pOutput: TNNetVolume);\n", + " begin\n", + " 
GetValidationPair(Idx, ThreadId, pInput, pOutput);\n", + " end;\n", + "\n", + "var\n", + " Application: TTestFitLoading;\n", + "begin\n", + " Application := TTestFitLoading.Create(nil);\n", + " Application.Title:='Nano Covolutional Based NLP Trained from File';\n", + " Application.Run;\n", + " Application.Free;\n", + "end.\n", + "\"\"\"\n", + "with open(\"neural-api/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet.lpr\", \"w\") as text_file:\n", + " text_file.write(code)\n", + "!lazbuild neural-api/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet.lpi\n", + "!ls -l neural-api/bin/x86_64-linux/bin/CaiOptimizedDenseNet" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "colab": { + "base_uri": "https://localhost:8080/" + }, + "id": "eETwGgH1RlJj", + "outputId": "b147f75e-a527-4a1b-e5a0-aac0eacd8aa1" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "\u001b[1;30;43mStreaming output truncated to the last 5000 lines.\u001b[0m\n", + "13792000 Examples seen. Accuracy: 0.8147 Error: 0.48197 Loss: 0.55661 Threads: 8 Forward time: 3.58s Backward time: 4.75s Step time: 37.59s\n", + "13824000 Examples seen. 
Accuracy: 0.8122 Error: 0.45405 Loss: 0.52630 Threads: 8 Forward time: 3.53s Backward time: 4.41s Step time: 37.42s\n", + "Starting Validation.\n", + "Epochs: 144 Examples seen:13824000 Validation Accuracy: 0.7821 Validation Error: 0.5543 Validation Loss: 0.7302 Total time: 302.47min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.832 Min Weight: -0.863 Max Output: 0.681 Min Output: -0.698 TNNetPointwiseConv 81,1,32 Times: 0.21s 0.08s Parent:0\n", + "Layer 2 Max Output: 0.681 Min Output: -0.698 TNNetPadXY 83,1,32 Times: 0.01s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.555 Min Weight: -0.535 Max Output: 2.933 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.15s 0.07s Parent:2\n", + "Layer 4 Max Output: 2.933 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.01s Parent:3\n", + "Layer 5 Max Output: 2.933 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.286 Min Weight: -0.309 Max Output: 3.582 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.38s 0.27s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.196 Min Weight: -0.227 Max Output: 1.981 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.59s 0.51s Parent:6\n", + "Layer 8 Max Output: 1.981 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.114 Min Weight: -0.126 Max Output: 1.064 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.73s 0.46s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.197 Min Weight: -0.180 Max Output: 3.811 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.04s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.841 Min Weight: -0.473 Max Output: 12.534 Min Output: -4.265 TNNetFullConnectLinear 128,1,1 Times: 0.05s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.817 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.8710 minutes. 500 epochs: 15.5913 hours.\n", + "Epochs: 144. Working time: 5.04 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy went to the park. she was very sally in the sho.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "13856000 Examples seen. Accuracy: 0.8119 Error: 0.53933 Loss: 0.64356 Threads: 8 Forward time: 3.24s Backward time: 4.18s Step time: 37.19s\n", + "13888000 Examples seen. Accuracy: 0.8095 Error: 0.52206 Loss: 0.65326 Threads: 8 Forward time: 3.11s Backward time: 3.97s Step time: 36.81s\n", + "13920000 Examples seen. Accuracy: 0.8095 Error: 0.54581 Loss: 0.71255 Threads: 8 Forward time: 3.32s Backward time: 4.42s Step time: 36.66s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 145 Examples seen:13920000 Validation Accuracy: 0.7907 Validation Error: 0.5557 Validation Loss: 0.7330 Total time: 304.40min\n", + "Epoch time: 1.8332 minutes. 500 epochs: 15.2767 hours.\n", + "Epochs: 145. Working time: 5.07 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park. he saw a big box with his mom w.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "13952000 Examples seen. Accuracy: 0.8099 Error: 0.46273 Loss: 0.57100 Threads: 8 Forward time: 3.12s Backward time: 4.21s Step time: 37.40s\n", + "13984000 Examples seen. Accuracy: 0.8081 Error: 0.54108 Loss: 0.72608 Threads: 8 Forward time: 3.20s Backward time: 4.16s Step time: 37.54s\n", + "14016000 Examples seen. 
Accuracy: 0.8149 Error: 0.50454 Loss: 0.60602 Threads: 8 Forward time: 3.32s Backward time: 4.38s Step time: 38.20s\n", + "Starting Validation.\n", + "Epochs: 146 Examples seen:14016000 Validation Accuracy: 0.7894 Validation Error: 0.5586 Validation Loss: 0.7270 Total time: 306.33min\n", + "Epoch time: 1.9102 minutes. 500 epochs: 15.9183 hours.\n", + "Epochs: 146. Working time: 5.11 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named sarah was playing in the park. she was three years o.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "14048000 Examples seen. Accuracy: 0.8145 Error: 0.46383 Loss: 0.56319 Threads: 8 Forward time: 3.24s Backward time: 4.29s Step time: 36.83s\n", + "14080000 Examples seen. Accuracy: 0.8166 Error: 0.46486 Loss: 0.58201 Threads: 8 Forward time: 3.28s Backward time: 4.31s Step time: 36.83s\n", + "14112000 Examples seen. Accuracy: 0.8134 Error: 0.50152 Loss: 0.56392 Threads: 8 Forward time: 3.45s Backward time: 4.63s Step time: 37.49s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 147 Examples seen:14112000 Validation Accuracy: 0.7922 Validation Error: 0.5447 Validation Loss: 0.7246 Total time: 308.27min\n", + "Epoch time: 1.8744 minutes. 500 epochs: 15.6204 hours.\n", + "Epochs: 147. Working time: 5.14 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom and dad. she was .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "14144000 Examples seen. Accuracy: 0.8193 Error: 0.50268 Loss: 0.66308 Threads: 8 Forward time: 3.35s Backward time: 4.32s Step time: 37.92s\n", + "14176000 Examples seen. 
Accuracy: 0.8179 Error: 0.49023 Loss: 0.65870 Threads: 8 Forward time: 3.13s Backward time: 4.21s Step time: 38.08s\n", + "14208000 Examples seen. Accuracy: 0.8223 Error: 0.48067 Loss: 0.56452 Threads: 8 Forward time: 3.21s Backward time: 4.40s Step time: 38.03s\n", + "Starting Validation.\n", + "Epochs: 148 Examples seen:14208000 Validation Accuracy: 0.7888 Validation Error: 0.5481 Validation Loss: 0.7365 Total time: 310.21min\n", + "Epoch time: 1.9016 minutes. 500 epochs: 15.8463 hours.\n", + "Epochs: 148. Working time: 5.17 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mommy and dad. she li.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "14240000 Examples seen. Accuracy: 0.8172 Error: 0.52585 Loss: 0.66083 Threads: 8 Forward time: 3.34s Backward time: 4.49s Step time: 37.30s\n", + "14272000 Examples seen. Accuracy: 0.8151 Error: 0.49464 Loss: 0.61435 Threads: 8 Forward time: 3.40s Backward time: 4.24s Step time: 37.70s\n", + "14304000 Examples seen. Accuracy: 0.8154 Error: 0.47836 Loss: 0.54122 Threads: 8 Forward time: 3.46s Backward time: 4.69s Step time: 38.19s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 149 Examples seen:14304000 Validation Accuracy: 0.7926 Validation Error: 0.5456 Validation Loss: 0.7226 Total time: 312.19min\n", + "Epoch time: 1.9094 minutes. 500 epochs: 15.9117 hours.\n", + "Epochs: 149. Working time: 5.20 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she loved to pla.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "14336000 Examples seen. 
Accuracy: 0.8207 Error: 0.45560 Loss: 0.51984 Threads: 8 Forward time: 3.42s Backward time: 4.54s Step time: 39.57s\n", + "14368000 Examples seen. Accuracy: 0.8220 Error: 0.52202 Loss: 0.65796 Threads: 8 Forward time: 3.30s Backward time: 4.24s Step time: 39.01s\n", + "14400000 Examples seen. Accuracy: 0.8176 Error: 0.48742 Loss: 0.63108 Threads: 8 Forward time: 3.46s Backward time: 4.75s Step time: 39.19s\n", + "Starting Validation.\n", + "Epochs: 150 Examples seen:14400000 Validation Accuracy: 0.7892 Validation Error: 0.5530 Validation Loss: 0.7128 Total time: 314.20min\n", + "Starting Testing.\n", + "Epochs: 150 Examples seen:14400000 Test Accuracy: 0.7892 Test Error: 0.5530 Test Loss: 0.7128 Total time: 314.24min\n", + "Epoch time: 1.9596 minutes. 500 epochs: 16.3304 hours.\n", + "Epochs: 150. Working time: 5.24 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park. she was so excited the stard .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "14432000 Examples seen. Accuracy: 0.8203 Error: 0.53494 Loss: 0.62183 Threads: 8 Forward time: 3.41s Backward time: 4.64s Step time: 39.12s\n", + "14464000 Examples seen. Accuracy: 0.8262 Error: 0.47624 Loss: 0.56688 Threads: 8 Forward time: 3.28s Backward time: 4.39s Step time: 39.00s\n", + "14496000 Examples seen. Accuracy: 0.8206 Error: 0.53666 Loss: 0.64866 Threads: 8 Forward time: 3.50s Backward time: 4.88s Step time: 38.91s\n", + "Starting Validation.\n", + "Epochs: 151 Examples seen:14496000 Validation Accuracy: 0.7913 Validation Error: 0.5436 Validation Loss: 0.7134 Total time: 316.23min\n", + "Epoch time: 1.9457 minutes. 500 epochs: 16.2142 hours.\n", + "Epochs: 151. Working time: 5.27 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they were spice..\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "14528000 Examples seen. Accuracy: 0.8187 Error: 0.45449 Loss: 0.54910 Threads: 8 Forward time: 3.28s Backward time: 4.40s Step time: 38.47s\n", + "14560000 Examples seen. Accuracy: 0.8171 Error: 0.43347 Loss: 0.52197 Threads: 8 Forward time: 3.33s Backward time: 4.52s Step time: 38.29s\n", + "14592000 Examples seen. Accuracy: 0.8158 Error: 0.50850 Loss: 0.58078 Threads: 8 Forward time: 3.54s Backward time: 4.25s Step time: 38.50s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 152 Examples seen:14592000 Validation Accuracy: 0.7944 Validation Error: 0.5422 Validation Loss: 0.7252 Total time: 318.24min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.834 Min Weight: -0.868 Max Output: 0.683 Min Output: -0.701 TNNetPointwiseConv 81,1,32 Times: 0.19s 0.07s Parent:0\n", + "Layer 2 Max Output: 0.683 Min Output: -0.701 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.558 Min Weight: -0.550 Max Output: 2.981 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.15s 0.09s Parent:2\n", + "Layer 4 Max Output: 2.981 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.01s Parent:3\n", + "Layer 5 Max Output: 2.981 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.292 Min Weight: -0.307 Max Output: 3.724 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.37s 0.23s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.199 Min Weight: -0.227 Max Output: 1.958 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.64s 0.48s Parent:6\n", + "Layer 8 Max Output: 1.958 Min Output: 0.000 TNNetMaxPoolWithPosition 
1,1,2048 Times: 0.06s 0.02s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.116 Min Weight: -0.127 Max Output: 1.235 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.75s 0.45s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.202 Min Weight: -0.183 Max Output: 3.333 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.03s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.852 Min Weight: -0.475 Max Output: 11.517 Min Output: -4.135 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.646 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.9248 minutes. 500 epochs: 16.0400 hours.\n", + "Epochs: 152. Working time: 5.30 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named sue was three years old and he was three years old a.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "14624000 Examples seen. Accuracy: 0.8154 Error: 0.47431 Loss: 0.61162 Threads: 8 Forward time: 3.47s Backward time: 4.56s Step time: 37.81s\n", + "14656000 Examples seen. Accuracy: 0.8163 Error: 0.51411 Loss: 0.64678 Threads: 8 Forward time: 3.25s Backward time: 4.18s Step time: 37.27s\n", + "14688000 Examples seen. Accuracy: 0.8155 Error: 0.52202 Loss: 0.63529 Threads: 8 Forward time: 3.47s Backward time: 4.70s Step time: 38.13s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 153 Examples seen:14688000 Validation Accuracy: 0.7955 Validation Error: 0.5403 Validation Loss: 0.7125 Total time: 320.21min\n", + "Epoch time: 1.9065 minutes. 500 epochs: 15.8871 hours.\n", + "Epochs: 153. Working time: 5.34 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. 
they wanted to m.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "14720000 Examples seen. Accuracy: 0.8158 Error: 0.48172 Loss: 0.56762 Threads: 8 Forward time: 3.49s Backward time: 4.62s Step time: 37.77s\n", + "14752000 Examples seen. Accuracy: 0.8155 Error: 0.50096 Loss: 0.61719 Threads: 8 Forward time: 3.35s Backward time: 4.42s Step time: 37.46s\n", + "14784000 Examples seen. Accuracy: 0.8141 Error: 0.49034 Loss: 0.58346 Threads: 8 Forward time: 3.20s Backward time: 4.29s Step time: 37.57s\n", + "Starting Validation.\n", + "Epochs: 154 Examples seen:14784000 Validation Accuracy: 0.7915 Validation Error: 0.5390 Validation Loss: 0.7244 Total time: 322.14min\n", + "Epoch time: 1.8786 minutes. 500 epochs: 15.6550 hours.\n", + "Epochs: 154. Working time: 5.37 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom and dad. she was .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "14816000 Examples seen. Accuracy: 0.8135 Error: 0.51257 Loss: 0.62560 Threads: 8 Forward time: 3.42s Backward time: 4.53s Step time: 36.95s\n", + "14848000 Examples seen. Accuracy: 0.8152 Error: 0.55342 Loss: 0.72038 Threads: 8 Forward time: 3.48s Backward time: 4.71s Step time: 37.00s\n", + "14880000 Examples seen. Accuracy: 0.8155 Error: 0.49799 Loss: 0.65286 Threads: 8 Forward time: 3.25s Backward time: 4.20s Step time: 37.22s\n", + "Starting Validation.\n", + "Epochs: 155 Examples seen:14880000 Validation Accuracy: 0.7880 Validation Error: 0.5457 Validation Loss: 0.7275 Total time: 324.03min\n", + "Epoch time: 1.8612 minutes. 500 epochs: 15.5104 hours.\n", + "Epochs: 155. Working time: 5.40 hours.\n", + "Testing.\n", + "once upon a time there was a little girl named lily. 
she loved to play outside in.\n", + "one day, a little girl named lily went to the park with her mom. she was very spe.\n", + "once upon a time there was a little girl named lily. she loved to play outside in.\n", + "Max prediction pos is: 81\n", + "14912000 Examples seen. Accuracy: 0.8183 Error: 0.47138 Loss: 0.55067 Threads: 8 Forward time: 3.54s Backward time: 4.82s Step time: 37.95s\n", + "14944000 Examples seen. Accuracy: 0.8171 Error: 0.49108 Loss: 0.64986 Threads: 8 Forward time: 3.51s Backward time: 4.72s Step time: 37.51s\n", + "14976000 Examples seen. Accuracy: 0.8175 Error: 0.52125 Loss: 0.61621 Threads: 8 Forward time: 3.29s Backward time: 4.09s Step time: 36.98s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 156 Examples seen:14976000 Validation Accuracy: 0.7988 Validation Error: 0.5310 Validation Loss: 0.7175 Total time: 325.99min\n", + "Epoch time: 1.8488 minutes. 500 epochs: 15.4071 hours.\n", + "Epochs: 156. Working time: 5.43 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big sa.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "15008000 Examples seen. Accuracy: 0.8180 Error: 0.48913 Loss: 0.60199 Threads: 8 Forward time: 3.26s Backward time: 4.25s Step time: 36.99s\n", + "15040000 Examples seen. Accuracy: 0.8180 Error: 0.43826 Loss: 0.47880 Threads: 8 Forward time: 3.17s Backward time: 4.39s Step time: 38.52s\n", + "15072000 Examples seen. Accuracy: 0.8194 Error: 0.48649 Loss: 0.64965 Threads: 8 Forward time: 3.32s Backward time: 4.28s Step time: 38.57s\n", + "Starting Validation.\n", + "Epochs: 157 Examples seen:15072000 Validation Accuracy: 0.7967 Validation Error: 0.5437 Validation Loss: 0.7109 Total time: 327.93min\n", + "Epoch time: 1.9286 minutes. 
500 epochs: 16.0713 hours.\n", + "Epochs: 157. Working time: 5.47 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big sa.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "15104000 Examples seen. Accuracy: 0.8181 Error: 0.48565 Loss: 0.61731 Threads: 8 Forward time: 3.52s Backward time: 4.74s Step time: 37.80s\n", + "15136000 Examples seen. Accuracy: 0.8196 Error: 0.51603 Loss: 0.59415 Threads: 8 Forward time: 3.58s Backward time: 4.55s Step time: 38.73s\n", + "15168000 Examples seen. Accuracy: 0.8181 Error: 0.46829 Loss: 0.55662 Threads: 8 Forward time: 3.74s Backward time: 5.13s Step time: 38.74s\n", + "Starting Validation.\n", + "Epochs: 158 Examples seen:15168000 Validation Accuracy: 0.7919 Validation Error: 0.5446 Validation Loss: 0.7288 Total time: 329.90min\n", + "Epoch time: 1.9371 minutes. 500 epochs: 16.1425 hours.\n", + "Epochs: 158. Working time: 5.50 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park. she saw a big smiled sara wit.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "15200000 Examples seen. Accuracy: 0.8208 Error: 0.51075 Loss: 0.59899 Threads: 8 Forward time: 3.42s Backward time: 4.47s Step time: 38.88s\n", + "15232000 Examples seen. Accuracy: 0.8175 Error: 0.49672 Loss: 0.61971 Threads: 8 Forward time: 3.33s Backward time: 4.52s Step time: 38.21s\n", + "15264000 Examples seen. 
Accuracy: 0.8188 Error: 0.43418 Loss: 0.52724 Threads: 8 Forward time: 3.20s Backward time: 4.18s Step time: 37.09s\n", + "Starting Validation.\n", + "Epochs: 159 Examples seen:15264000 Validation Accuracy: 0.7917 Validation Error: 0.5432 Validation Loss: 0.7088 Total time: 331.85min\n", + "Epoch time: 1.8543 minutes. 500 epochs: 15.4525 hours.\n", + "Epochs: 159. Working time: 5.53 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy went to the park. she saw a big box of the stor.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "15296000 Examples seen. Accuracy: 0.8192 Error: 0.50242 Loss: 0.63764 Threads: 8 Forward time: 3.27s Backward time: 4.23s Step time: 37.63s\n", + "15328000 Examples seen. Accuracy: 0.8153 Error: 0.55646 Loss: 0.78286 Threads: 8 Forward time: 3.43s Backward time: 4.58s Step time: 37.56s\n", + "15360000 Examples seen. 
Accuracy: 0.8185 Error: 0.42880 Loss: 0.49212 Threads: 8 Forward time: 3.69s Backward time: 4.97s Step time: 37.16s\n", + "Starting Validation.\n", + "Epochs: 160 Examples seen:15360000 Validation Accuracy: 0.7972 Validation Error: 0.5297 Validation Loss: 0.7089 Total time: 333.77min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.841 Min Weight: -0.873 Max Output: 0.687 Min Output: -0.703 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.10s Parent:0\n", + "Layer 2 Max Output: 0.687 Min Output: -0.703 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.560 Min Weight: -0.560 Max Output: 3.014 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.16s 0.07s Parent:2\n", + "Layer 4 Max Output: 3.014 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.014 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.298 Min Weight: -0.316 Max Output: 3.648 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.39s 0.31s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.204 Min Weight: -0.225 Max Output: 1.999 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.58s 0.53s Parent:6\n", + "Layer 8 Max Output: 1.999 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.08s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.116 Min Weight: -0.132 Max Output: 1.334 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.77s 0.54s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.205 Min Weight: -0.185 Max Output: 3.786 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.03s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.866 Min Weight: -0.480 Max Output: 13.808 Min Output: -4.271 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.914 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Starting Testing.\n", + "Epochs: 160 Examples seen:15360000 Test Accuracy: 0.7972 Test Error: 0.5297 Test Loss: 0.7089 Total time: 333.81min\n", + "Epoch time: 1.8579 minutes. 500 epochs: 15.4821 hours.\n", + "Epochs: 160. Working time: 5.56 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily was three years old and she was a big spice. sh.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "15392000 Examples seen. Accuracy: 0.8169 Error: 0.47174 Loss: 0.60853 Threads: 8 Forward time: 3.52s Backward time: 4.57s Step time: 37.40s\n", + "15424000 Examples seen. Accuracy: 0.8201 Error: 0.51170 Loss: 0.69913 Threads: 8 Forward time: 3.43s Backward time: 4.60s Step time: 37.90s\n", + "15456000 Examples seen. Accuracy: 0.8184 Error: 0.52716 Loss: 0.62315 Threads: 8 Forward time: 3.28s Backward time: 4.41s Step time: 37.85s\n", + "Starting Validation.\n", + "Epochs: 161 Examples seen:15456000 Validation Accuracy: 0.7926 Validation Error: 0.5515 Validation Loss: 0.7183 Total time: 335.74min\n", + "Epoch time: 1.8927 minutes. 500 epochs: 15.7729 hours.\n", + "Epochs: 161. Working time: 5.60 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named sarah was playing in her garden. she was so excited..\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "15488000 Examples seen. Accuracy: 0.8171 Error: 0.52430 Loss: 0.61921 Threads: 8 Forward time: 3.36s Backward time: 4.16s Step time: 37.36s\n", + "15520000 Examples seen. Accuracy: 0.8227 Error: 0.46778 Loss: 0.52729 Threads: 8 Forward time: 3.64s Backward time: 4.57s Step time: 37.36s\n", + "15552000 Examples seen. 
Accuracy: 0.8208 Error: 0.45337 Loss: 0.54968 Threads: 8 Forward time: 3.51s Backward time: 4.79s Step time: 37.08s\n", + "Starting Validation.\n", + "Epochs: 162 Examples seen:15552000 Validation Accuracy: 0.7936 Validation Error: 0.5385 Validation Loss: 0.7128 Total time: 337.64min\n", + "Epoch time: 1.8538 minutes. 500 epochs: 15.4483 hours.\n", + "Epochs: 162. Working time: 5.63 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park. he was very and had a big tree .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "15584000 Examples seen. Accuracy: 0.8179 Error: 0.49568 Loss: 0.59211 Threads: 8 Forward time: 3.23s Backward time: 4.13s Step time: 36.66s\n", + "15616000 Examples seen. Accuracy: 0.8165 Error: 0.56415 Loss: 0.65956 Threads: 8 Forward time: 3.57s Backward time: 4.66s Step time: 37.19s\n", + "15648000 Examples seen. Accuracy: 0.8177 Error: 0.55040 Loss: 0.72092 Threads: 8 Forward time: 3.61s Backward time: 4.77s Step time: 38.09s\n", + "Starting Validation.\n", + "Epochs: 163 Examples seen:15648000 Validation Accuracy: 0.7955 Validation Error: 0.5349 Validation Loss: 0.7075 Total time: 339.55min\n", + "Epoch time: 1.9046 minutes. 500 epochs: 15.8717 hours.\n", + "Epochs: 163. Working time: 5.66 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park. the sun was so happy and his mo.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "15680000 Examples seen. Accuracy: 0.8194 Error: 0.40159 Loss: 0.51663 Threads: 8 Forward time: 3.33s Backward time: 4.45s Step time: 37.87s\n", + "15712000 Examples seen. 
Accuracy: 0.8180 Error: 0.52432 Loss: 0.58975 Threads: 8 Forward time: 3.32s Backward time: 4.40s Step time: 37.89s\n", + "15744000 Examples seen. Accuracy: 0.8172 Error: 0.55457 Loss: 0.64978 Threads: 8 Forward time: 3.14s Backward time: 4.21s Step time: 36.94s\n", + "Starting Validation.\n", + "Epochs: 164 Examples seen:15744000 Validation Accuracy: 0.7955 Validation Error: 0.5397 Validation Loss: 0.7022 Total time: 341.48min\n", + "Epoch time: 1.8469 minutes. 500 epochs: 15.3908 hours.\n", + "Epochs: 164. Working time: 5.69 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she had a specia.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "15776000 Examples seen. Accuracy: 0.8181 Error: 0.45349 Loss: 0.57007 Threads: 8 Forward time: 3.45s Backward time: 4.60s Step time: 37.34s\n", + "15808000 Examples seen. Accuracy: 0.8166 Error: 0.55412 Loss: 0.70457 Threads: 8 Forward time: 3.25s Backward time: 4.24s Step time: 36.80s\n", + "15840000 Examples seen. Accuracy: 0.8189 Error: 0.43382 Loss: 0.52699 Threads: 8 Forward time: 3.35s Backward time: 4.39s Step time: 37.12s\n", + "Starting Validation.\n", + "Epochs: 165 Examples seen:15840000 Validation Accuracy: 0.7976 Validation Error: 0.5336 Validation Loss: 0.6956 Total time: 343.37min\n", + "Epoch time: 1.8562 minutes. 500 epochs: 15.4683 hours.\n", + "Epochs: 165. Working time: 5.72 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she was three ye.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "15872000 Examples seen. 
Accuracy: 0.8198 Error: 0.49450 Loss: 0.70460 Threads: 8 Forward time: 3.75s Backward time: 5.06s Step time: 38.43s\n", + "15904000 Examples seen. Accuracy: 0.8197 Error: 0.50270 Loss: 0.63558 Threads: 8 Forward time: 3.33s Backward time: 4.30s Step time: 37.51s\n", + "15936000 Examples seen. Accuracy: 0.8182 Error: 0.49010 Loss: 0.58202 Threads: 8 Forward time: 3.13s Backward time: 3.99s Step time: 37.30s\n", + "Starting Validation.\n", + "Epochs: 166 Examples seen:15936000 Validation Accuracy: 0.7957 Validation Error: 0.5380 Validation Loss: 0.6979 Total time: 345.30min\n", + "Epoch time: 1.8651 minutes. 500 epochs: 15.5425 hours.\n", + "Epochs: 166. Working time: 5.76 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named lily went to the park with her mom. they wanted to s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "15968000 Examples seen. Accuracy: 0.8226 Error: 0.47787 Loss: 0.57900 Threads: 8 Forward time: 3.81s Backward time: 5.10s Step time: 38.05s\n", + "16000000 Examples seen. Accuracy: 0.8189 Error: 0.49776 Loss: 0.57477 Threads: 8 Forward time: 3.60s Backward time: 4.83s Step time: 37.97s\n", + "16032000 Examples seen. Accuracy: 0.8250 Error: 0.43094 Loss: 0.52801 Threads: 8 Forward time: 3.47s Backward time: 4.72s Step time: 37.89s\n", + "Starting Validation.\n", + "Epochs: 167 Examples seen:16032000 Validation Accuracy: 0.7967 Validation Error: 0.5216 Validation Loss: 0.6988 Total time: 347.25min\n", + "Epoch time: 1.8944 minutes. 500 epochs: 15.7863 hours.\n", + "Epochs: 167. Working time: 5.79 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park. he was three years old and love.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "16064000 Examples seen. Accuracy: 0.8242 Error: 0.42663 Loss: 0.51060 Threads: 8 Forward time: 3.42s Backward time: 4.55s Step time: 37.73s\n", + "16096000 Examples seen. Accuracy: 0.8228 Error: 0.44327 Loss: 0.53628 Threads: 8 Forward time: 3.21s Backward time: 4.24s Step time: 37.49s\n", + "16128000 Examples seen. Accuracy: 0.8203 Error: 0.51301 Loss: 0.63125 Threads: 8 Forward time: 3.43s Backward time: 4.59s Step time: 37.76s\n", + "Starting Validation.\n", + "Epochs: 168 Examples seen:16128000 Validation Accuracy: 0.7951 Validation Error: 0.5362 Validation Loss: 0.6966 Total time: 349.17min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.847 Min Weight: -0.880 Max Output: 0.689 Min Output: -0.706 TNNetPointwiseConv 81,1,32 Times: 0.19s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.689 Min Output: -0.706 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.566 Min Weight: -0.564 Max Output: 3.048 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.14s 0.08s Parent:2\n", + "Layer 4 Max Output: 3.048 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.048 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.01s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.302 Min Weight: -0.319 Max Output: 3.731 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.40s 0.30s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.204 Min Weight: -0.224 Max Output: 2.046 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.52s 0.50s Parent:6\n", + "Layer 8 Max Output: 2.046 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.08s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.118 Min Weight: -0.135 Max Output: 1.254 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.73s 0.46s Parent:8\n", + "Layer 10 Neurons:128 Max 
Weight: 0.212 Min Weight: -0.185 Max Output: 3.482 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.875 Min Weight: -0.487 Max Output: 12.720 Min Output: -4.162 TNNetFullConnectLinear 128,1,1 Times: 0.02s 0.02s Parent:10\n", + "Layer 12 Max Output: 0.769 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.8879 minutes. 500 epochs: 15.7321 hours.\n", + "Epochs: 168. Working time: 5.82 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park. he was so happy and she saw a b.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "16160000 Examples seen. Accuracy: 0.8181 Error: 0.46700 Loss: 0.61260 Threads: 8 Forward time: 3.74s Backward time: 4.96s Step time: 37.24s\n", + "16192000 Examples seen. Accuracy: 0.8195 Error: 0.45400 Loss: 0.49363 Threads: 8 Forward time: 3.33s Backward time: 4.33s Step time: 37.53s\n", + "16224000 Examples seen. Accuracy: 0.8210 Error: 0.47790 Loss: 0.56335 Threads: 8 Forward time: 3.19s Backward time: 4.04s Step time: 36.93s\n", + "Starting Validation.\n", + "Epochs: 169 Examples seen:16224000 Validation Accuracy: 0.7959 Validation Error: 0.5463 Validation Loss: 0.7018 Total time: 351.08min\n", + "Epoch time: 1.8465 minutes. 500 epochs: 15.3875 hours.\n", + "Epochs: 169. Working time: 5.85 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. the sky a red be.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "16256000 Examples seen. 
Accuracy: 0.8216 Error: 0.51965 Loss: 0.63280 Threads: 8 Forward time: 3.34s Backward time: 4.41s Step time: 38.17s\n", + "16288000 Examples seen. Accuracy: 0.8181 Error: 0.46519 Loss: 0.54100 Threads: 8 Forward time: 3.34s Backward time: 4.60s Step time: 38.91s\n", + "16320000 Examples seen. Accuracy: 0.8175 Error: 0.56147 Loss: 0.67111 Threads: 8 Forward time: 3.17s Backward time: 4.13s Step time: 39.00s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 170 Examples seen:16320000 Validation Accuracy: 0.7990 Validation Error: 0.5372 Validation Loss: 0.6888 Total time: 353.10min\n", + "Starting Testing.\n", + "Epochs: 170 Examples seen:16320000 Test Accuracy: 0.7990 Test Error: 0.5372 Test Loss: 0.6888 Total time: 353.14min\n", + "Epoch time: 1.9499 minutes. 500 epochs: 16.2492 hours.\n", + "Epochs: 170. Working time: 5.89 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big be.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "16352000 Examples seen. Accuracy: 0.8207 Error: 0.44566 Loss: 0.54011 Threads: 8 Forward time: 3.46s Backward time: 4.71s Step time: 37.72s\n", + "16384000 Examples seen. Accuracy: 0.8184 Error: 0.46755 Loss: 0.55350 Threads: 8 Forward time: 3.67s Backward time: 4.94s Step time: 40.06s\n", + "16416000 Examples seen. Accuracy: 0.8196 Error: 0.50368 Loss: 0.68839 Threads: 8 Forward time: 3.36s Backward time: 4.45s Step time: 39.23s\n", + "Starting Validation.\n", + "Epochs: 171 Examples seen:16416000 Validation Accuracy: 0.7970 Validation Error: 0.5337 Validation Loss: 0.6941 Total time: 355.13min\n", + "Epoch time: 1.9616 minutes. 500 epochs: 16.3463 hours.\n", + "Epochs: 171. Working time: 5.92 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she was three an.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "16448000 Examples seen. Accuracy: 0.8222 Error: 0.45512 Loss: 0.53831 Threads: 8 Forward time: 3.35s Backward time: 4.57s Step time: 39.05s\n", + "16480000 Examples seen. Accuracy: 0.8216 Error: 0.47884 Loss: 0.58354 Threads: 8 Forward time: 3.58s Backward time: 4.72s Step time: 39.60s\n", + "16512000 Examples seen. Accuracy: 0.8211 Error: 0.40897 Loss: 0.50626 Threads: 8 Forward time: 3.30s Backward time: 4.22s Step time: 40.38s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 172 Examples seen:16512000 Validation Accuracy: 0.7995 Validation Error: 0.5241 Validation Loss: 0.6851 Total time: 357.20min\n", + "Epoch time: 2.0188 minutes. 500 epochs: 16.8233 hours.\n", + "Epochs: 172. Working time: 5.95 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy was walking in the park. he was a small house with his a bi.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "16544000 Examples seen. Accuracy: 0.8219 Error: 0.52759 Loss: 0.62796 Threads: 8 Forward time: 3.53s Backward time: 4.51s Step time: 38.26s\n", + "16576000 Examples seen. Accuracy: 0.8241 Error: 0.49128 Loss: 0.60878 Threads: 8 Forward time: 3.32s Backward time: 4.44s Step time: 38.63s\n", + "16608000 Examples seen. Accuracy: 0.8234 Error: 0.63504 Loss: 0.80454 Threads: 8 Forward time: 3.64s Backward time: 4.97s Step time: 38.20s\n", + "Starting Validation.\n", + "VALIDATION RECORD! 
Saving NN at autosave.nn\n", + "Epochs: 173 Examples seen:16608000 Validation Accuracy: 0.7997 Validation Error: 0.5250 Validation Loss: 0.6834 Total time: 359.21min\n", + "Epoch time: 1.9099 minutes. 500 epochs: 15.9154 hours.\n", + "Epochs: 173. Working time: 5.99 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. he saw a big box o.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "16640000 Examples seen. Accuracy: 0.8225 Error: 0.43253 Loss: 0.53701 Threads: 8 Forward time: 3.33s Backward time: 4.56s Step time: 38.07s\n", + "16672000 Examples seen. Accuracy: 0.8222 Error: 0.49861 Loss: 0.62381 Threads: 8 Forward time: 3.35s Backward time: 4.33s Step time: 37.98s\n", + "16704000 Examples seen. Accuracy: 0.8231 Error: 0.53858 Loss: 0.68441 Threads: 8 Forward time: 3.83s Backward time: 5.03s Step time: 38.81s\n", + "Starting Validation.\n", + "Epochs: 174 Examples seen:16704000 Validation Accuracy: 0.7974 Validation Error: 0.5303 Validation Loss: 0.6889 Total time: 361.17min\n", + "Epoch time: 1.9406 minutes. 500 epochs: 16.1717 hours.\n", + "Epochs: 174. Working time: 6.02 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy went to the park. she saw a big slide and saw s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "16736000 Examples seen. Accuracy: 0.8201 Error: 0.46644 Loss: 0.56445 Threads: 8 Forward time: 3.56s Backward time: 4.67s Step time: 37.87s\n", + "16768000 Examples seen. Accuracy: 0.8223 Error: 0.54181 Loss: 0.68993 Threads: 8 Forward time: 3.23s Backward time: 4.18s Step time: 37.79s\n", + "16800000 Examples seen. 
Accuracy: 0.8262 Error: 0.47563 Loss: 0.62623 Threads: 8 Forward time: 3.28s Backward time: 4.28s Step time: 37.55s\n", + "Starting Validation.\n", + "Epochs: 175 Examples seen:16800000 Validation Accuracy: 0.7972 Validation Error: 0.5231 Validation Loss: 0.6860 Total time: 363.10min\n", + "Epoch time: 1.8773 minutes. 500 epochs: 15.6438 hours.\n", + "Epochs: 175. Working time: 6.05 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big br.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "16832000 Examples seen. Accuracy: 0.8302 Error: 0.41008 Loss: 0.49949 Threads: 8 Forward time: 3.42s Backward time: 4.43s Step time: 37.40s\n", + "16864000 Examples seen. Accuracy: 0.8276 Error: 0.45521 Loss: 0.55687 Threads: 8 Forward time: 3.42s Backward time: 4.48s Step time: 37.83s\n", + "16896000 Examples seen. 
Accuracy: 0.8264 Error: 0.45483 Loss: 0.54954 Threads: 8 Forward time: 3.40s Backward time: 4.54s Step time: 37.51s\n", + "Starting Validation.\n", + "Epochs: 176 Examples seen:16896000 Validation Accuracy: 0.7986 Validation Error: 0.5335 Validation Loss: 0.6921 Total time: 365.03min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.852 Min Weight: -0.888 Max Output: 0.692 Min Output: -0.710 TNNetPointwiseConv 81,1,32 Times: 0.18s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.692 Min Output: -0.710 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.567 Min Weight: -0.576 Max Output: 3.088 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.14s 0.08s Parent:2\n", + "Layer 4 Max Output: 3.088 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.088 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.311 Min Weight: -0.325 Max Output: 3.621 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.42s 0.29s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.206 Min Weight: -0.223 Max Output: 2.112 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.69s 0.51s Parent:6\n", + "Layer 8 Max Output: 2.112 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.119 Min Weight: -0.141 Max Output: 1.258 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.76s 0.45s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.216 Min Weight: -0.185 Max Output: 3.538 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.06s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.886 Min Weight: -0.494 Max Output: 13.213 Min Output: -4.288 TNNetFullConnectLinear 128,1,1 Times: 0.05s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.869 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.8756 minutes. 500 epochs: 15.6304 hours.\n", + "Epochs: 176. Working time: 6.08 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy wanted to play with her toy. she saw a big snow.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "16928000 Examples seen. Accuracy: 0.8246 Error: 0.45023 Loss: 0.51150 Threads: 8 Forward time: 3.44s Backward time: 4.57s Step time: 38.97s\n", + "16960000 Examples seen. Accuracy: 0.8281 Error: 0.44634 Loss: 0.50187 Threads: 8 Forward time: 3.51s Backward time: 4.69s Step time: 39.97s\n", + "16992000 Examples seen. Accuracy: 0.8261 Error: 0.51555 Loss: 0.62953 Threads: 8 Forward time: 3.56s Backward time: 4.72s Step time: 38.98s\n", + "Starting Validation.\n", + "Epochs: 177 Examples seen:16992000 Validation Accuracy: 0.7995 Validation Error: 0.5280 Validation Loss: 0.6854 Total time: 367.04min\n", + "Epoch time: 1.9489 minutes. 500 epochs: 16.2408 hours.\n", + "Epochs: 177. Working time: 6.12 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she wanted to pl.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "17024000 Examples seen. Accuracy: 0.8311 Error: 0.47333 Loss: 0.57826 Threads: 8 Forward time: 3.66s Backward time: 4.69s Step time: 39.11s\n", + "17056000 Examples seen. Accuracy: 0.8293 Error: 0.46543 Loss: 0.54092 Threads: 8 Forward time: 3.49s Backward time: 4.57s Step time: 38.67s\n", + "17088000 Examples seen. 
Accuracy: 0.8261 Error: 0.45529 Loss: 0.54769 Threads: 8 Forward time: 3.46s Backward time: 4.49s Step time: 38.34s\n", + "Starting Validation.\n", + "Epochs: 178 Examples seen:17088000 Validation Accuracy: 0.7976 Validation Error: 0.5284 Validation Loss: 0.6900 Total time: 369.02min\n", + "Epoch time: 1.9172 minutes. 500 epochs: 15.9763 hours.\n", + "Epochs: 178. Working time: 6.15 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom and dad. they wer.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "17120000 Examples seen. Accuracy: 0.8262 Error: 0.41631 Loss: 0.50796 Threads: 8 Forward time: 3.32s Backward time: 4.35s Step time: 38.71s\n", + "17152000 Examples seen. Accuracy: 0.8265 Error: 0.44896 Loss: 0.56142 Threads: 8 Forward time: 3.51s Backward time: 4.49s Step time: 39.43s\n", + "17184000 Examples seen. Accuracy: 0.8237 Error: 0.47271 Loss: 0.59882 Threads: 8 Forward time: 3.31s Backward time: 4.29s Step time: 37.71s\n", + "Starting Validation.\n", + "Epochs: 179 Examples seen:17184000 Validation Accuracy: 0.7955 Validation Error: 0.5320 Validation Loss: 0.6964 Total time: 371.00min\n", + "Epoch time: 1.8856 minutes. 500 epochs: 15.7133 hours.\n", + "Epochs: 179. Working time: 6.18 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big be.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "17216000 Examples seen. Accuracy: 0.8221 Error: 0.48071 Loss: 0.59820 Threads: 8 Forward time: 3.40s Backward time: 4.34s Step time: 38.71s\n", + "17248000 Examples seen. 
Accuracy: 0.8261 Error: 0.44388 Loss: 0.56942 Threads: 8 Forward time: 3.47s Backward time: 4.50s Step time: 38.91s\n", + "17280000 Examples seen. Accuracy: 0.8310 Error: 0.40250 Loss: 0.47668 Threads: 8 Forward time: 3.42s Backward time: 4.47s Step time: 37.63s\n", + "Starting Validation.\n", + "Epochs: 180 Examples seen:17280000 Validation Accuracy: 0.7953 Validation Error: 0.5153 Validation Loss: 0.6856 Total time: 372.96min\n", + "Starting Testing.\n", + "Epochs: 180 Examples seen:17280000 Test Accuracy: 0.7953 Test Error: 0.5153 Test Loss: 0.6856 Total time: 373.00min\n", + "Epoch time: 1.8816 minutes. 500 epochs: 15.6804 hours.\n", + "Epochs: 180. Working time: 6.22 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park. they were sam and dad with a bi.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "17312000 Examples seen. Accuracy: 0.8268 Error: 0.55956 Loss: 0.72124 Threads: 8 Forward time: 3.22s Backward time: 4.17s Step time: 36.85s\n", + "17344000 Examples seen. Accuracy: 0.8229 Error: 0.45926 Loss: 0.59775 Threads: 8 Forward time: 3.55s Backward time: 4.46s Step time: 38.24s\n", + "17376000 Examples seen. Accuracy: 0.8215 Error: 0.45518 Loss: 0.53333 Threads: 8 Forward time: 3.43s Backward time: 4.57s Step time: 37.44s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 181 Examples seen:17376000 Validation Accuracy: 0.8036 Validation Error: 0.5308 Validation Loss: 0.6752 Total time: 374.97min\n", + "Epoch time: 1.8721 minutes. 500 epochs: 15.6004 hours.\n", + "Epochs: 181. Working time: 6.25 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. 
she was three ye.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "17408000 Examples seen. Accuracy: 0.8224 Error: 0.45778 Loss: 0.57519 Threads: 8 Forward time: 3.13s Backward time: 3.99s Step time: 37.57s\n", + "17440000 Examples seen. Accuracy: 0.8211 Error: 0.51511 Loss: 0.68127 Threads: 8 Forward time: 3.34s Backward time: 4.40s Step time: 37.83s\n", + "17472000 Examples seen. Accuracy: 0.8222 Error: 0.50567 Loss: 0.67220 Threads: 8 Forward time: 3.65s Backward time: 4.65s Step time: 36.98s\n", + "Starting Validation.\n", + "Epochs: 182 Examples seen:17472000 Validation Accuracy: 0.7995 Validation Error: 0.5217 Validation Loss: 0.6945 Total time: 376.88min\n", + "Epoch time: 1.8490 minutes. 500 epochs: 15.4083 hours.\n", + "Epochs: 182. Working time: 6.28 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she was the park.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "17504000 Examples seen. Accuracy: 0.8226 Error: 0.56932 Loss: 0.72448 Threads: 8 Forward time: 3.25s Backward time: 4.12s Step time: 36.83s\n", + "17536000 Examples seen. Accuracy: 0.8231 Error: 0.44564 Loss: 0.54208 Threads: 8 Forward time: 3.51s Backward time: 4.60s Step time: 37.37s\n", + "17568000 Examples seen. Accuracy: 0.8309 Error: 0.40686 Loss: 0.49392 Threads: 8 Forward time: 3.30s Backward time: 4.17s Step time: 36.62s\n", + "Starting Validation.\n", + "Epochs: 183 Examples seen:17568000 Validation Accuracy: 0.7999 Validation Error: 0.5167 Validation Loss: 0.6965 Total time: 378.78min\n", + "Epoch time: 1.8312 minutes. 500 epochs: 15.2604 hours.\n", + "Epochs: 183. Working time: 6.31 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "17600000 Examples seen. Accuracy: 0.8281 Error: 0.49138 Loss: 0.59746 Threads: 8 Forward time: 3.23s Backward time: 4.24s Step time: 37.03s\n", + "17632000 Examples seen. Accuracy: 0.8277 Error: 0.44791 Loss: 0.54956 Threads: 8 Forward time: 3.57s Backward time: 4.61s Step time: 37.83s\n", + "17664000 Examples seen. Accuracy: 0.8252 Error: 0.47092 Loss: 0.58658 Threads: 8 Forward time: 3.41s Backward time: 4.58s Step time: 39.83s\n", + "Starting Validation.\n", + "Epochs: 184 Examples seen:17664000 Validation Accuracy: 0.8005 Validation Error: 0.5217 Validation Loss: 0.6781 Total time: 380.74min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.862 Min Weight: -0.895 Max Output: 0.697 Min Output: -0.714 TNNetPointwiseConv 81,1,32 Times: 0.21s 0.10s Parent:0\n", + "Layer 2 Max Output: 0.697 Min Output: -0.714 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.573 Min Weight: -0.578 Max Output: 3.144 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.15s 0.07s Parent:2\n", + "Layer 4 Max Output: 3.144 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.144 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.311 Min Weight: -0.332 Max Output: 3.733 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.38s 0.29s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.207 Min Weight: -0.223 Max Output: 2.290 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.69s 0.49s Parent:6\n", + "Layer 8 Max Output: 2.290 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 
Neurons:1024 Max Weight: 0.121 Min Weight: -0.145 Max Output: 1.370 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.81s 0.47s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.219 Min Weight: -0.185 Max Output: 3.652 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.899 Min Weight: -0.502 Max Output: 13.138 Min Output: -4.167 TNNetFullConnectLinear 128,1,1 Times: 0.05s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.851 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.9916 minutes. 500 epochs: 16.5963 hours.\n", + "Epochs: 184. Working time: 6.35 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big b.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "17696000 Examples seen. Accuracy: 0.8250 Error: 0.48533 Loss: 0.61532 Threads: 8 Forward time: 3.85s Backward time: 5.00s Step time: 41.09s\n", + "17728000 Examples seen. Accuracy: 0.8238 Error: 0.53709 Loss: 0.67631 Threads: 8 Forward time: 3.94s Backward time: 5.06s Step time: 40.34s\n", + "17760000 Examples seen. Accuracy: 0.8291 Error: 0.45492 Loss: 0.60554 Threads: 8 Forward time: 3.61s Backward time: 4.66s Step time: 41.18s\n", + "Starting Validation.\n", + "Epochs: 185 Examples seen:17760000 Validation Accuracy: 0.8011 Validation Error: 0.5164 Validation Loss: 0.6831 Total time: 382.83min\n", + "Epoch time: 2.0590 minutes. 500 epochs: 17.1587 hours.\n", + "Epochs: 185. Working time: 6.38 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lucy went to the park. she was a big because it was .\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "Max prediction pos is: 81\n", + "17792000 Examples seen. Accuracy: 0.8364 Error: 0.47374 Loss: 0.57190 Threads: 8 Forward time: 3.53s Backward time: 4.63s Step time: 39.92s\n", + "17824000 Examples seen. Accuracy: 0.8312 Error: 0.46945 Loss: 0.59145 Threads: 8 Forward time: 3.70s Backward time: 4.99s Step time: 40.37s\n", + "17856000 Examples seen. Accuracy: 0.8281 Error: 0.51840 Loss: 0.66423 Threads: 8 Forward time: 3.69s Backward time: 5.05s Step time: 40.57s\n", + "Starting Validation.\n", + "Epochs: 186 Examples seen:17856000 Validation Accuracy: 0.7961 Validation Error: 0.5223 Validation Loss: 0.6931 Total time: 384.90min\n", + "Epoch time: 2.0285 minutes. 500 epochs: 16.9038 hours.\n", + "Epochs: 186. Working time: 6.41 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she saw a big ba.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "17888000 Examples seen. Accuracy: 0.8333 Error: 0.43355 Loss: 0.51204 Threads: 8 Forward time: 3.38s Backward time: 4.41s Step time: 41.43s\n", + "17920000 Examples seen. Accuracy: 0.8309 Error: 0.44810 Loss: 0.54409 Threads: 8 Forward time: 3.68s Backward time: 4.96s Step time: 40.57s\n", + "17952000 Examples seen. Accuracy: 0.8251 Error: 0.45541 Loss: 0.54401 Threads: 8 Forward time: 4.16s Backward time: 5.53s Step time: 41.52s\n", + "Starting Validation.\n", + "Epochs: 187 Examples seen:17952000 Validation Accuracy: 0.8001 Validation Error: 0.5308 Validation Loss: 0.6822 Total time: 387.01min\n", + "Epoch time: 2.0760 minutes. 500 epochs: 17.3000 hours.\n", + "Epochs: 187. Working time: 6.45 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. 
she saw a big sa.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "17984000 Examples seen. Accuracy: 0.8252 Error: 0.43521 Loss: 0.51880 Threads: 8 Forward time: 3.55s Backward time: 4.63s Step time: 40.51s\n", + "18016000 Examples seen. Accuracy: 0.8264 Error: 0.44453 Loss: 0.50850 Threads: 8 Forward time: 3.90s Backward time: 5.30s Step time: 41.27s\n", + "18048000 Examples seen. Accuracy: 0.8359 Error: 0.48485 Loss: 0.55845 Threads: 8 Forward time: 3.94s Backward time: 5.26s Step time: 40.73s\n", + "Starting Validation.\n", + "Epochs: 188 Examples seen:18048000 Validation Accuracy: 0.8026 Validation Error: 0.5170 Validation Loss: 0.6728 Total time: 389.10min\n", + "Epoch time: 2.0364 minutes. 500 epochs: 16.9700 hours.\n", + "Epochs: 188. Working time: 6.49 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom and dad. she was .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "18080000 Examples seen. Accuracy: 0.8315 Error: 0.46469 Loss: 0.56591 Threads: 8 Forward time: 3.47s Backward time: 4.45s Step time: 39.90s\n", + "18112000 Examples seen. Accuracy: 0.8273 Error: 0.44969 Loss: 0.52938 Threads: 8 Forward time: 3.16s Backward time: 4.09s Step time: 39.21s\n", + "18144000 Examples seen. Accuracy: 0.8252 Error: 0.47480 Loss: 0.53560 Threads: 8 Forward time: 3.49s Backward time: 4.55s Step time: 39.02s\n", + "Starting Validation.\n", + "Epochs: 189 Examples seen:18144000 Validation Accuracy: 0.7949 Validation Error: 0.5281 Validation Loss: 0.6783 Total time: 391.12min\n", + "Epoch time: 1.9508 minutes. 500 epochs: 16.2571 hours.\n", + "Epochs: 189. Working time: 6.52 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little girl named lucy was playing in the park. she saw a big shiny an.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "18176000 Examples seen. Accuracy: 0.8365 Error: 0.45654 Loss: 0.53510 Threads: 8 Forward time: 3.40s Backward time: 4.51s Step time: 38.27s\n", + "18208000 Examples seen. Accuracy: 0.8365 Error: 0.39616 Loss: 0.48018 Threads: 8 Forward time: 3.21s Backward time: 4.09s Step time: 38.84s\n", + "18240000 Examples seen. Accuracy: 0.8428 Error: 0.31764 Loss: 0.31535 Threads: 8 Forward time: 4.11s Backward time: 5.39s Step time: 38.30s\n", + "Starting Validation.\n", + "Epochs: 190 Examples seen:18240000 Validation Accuracy: 0.7978 Validation Error: 0.5093 Validation Loss: 0.6776 Total time: 393.09min\n", + "Starting Testing.\n", + "Epochs: 190 Examples seen:18240000 Test Accuracy: 0.7978 Test Error: 0.5093 Test Loss: 0.6776 Total time: 393.13min\n", + "Epoch time: 1.9148 minutes. 500 epochs: 15.9571 hours.\n", + "Epochs: 190. Working time: 6.55 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy went to the park. she saw a big bed something i.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "18272000 Examples seen. Accuracy: 0.8312 Error: 0.47636 Loss: 0.58555 Threads: 8 Forward time: 3.53s Backward time: 4.79s Step time: 39.38s\n", + "18304000 Examples seen. Accuracy: 0.8256 Error: 0.55286 Loss: 0.68610 Threads: 8 Forward time: 3.68s Backward time: 4.97s Step time: 38.55s\n", + "18336000 Examples seen. 
Accuracy: 0.8233 Error: 0.46082 Loss: 0.61711 Threads: 8 Forward time: 3.61s Backward time: 4.74s Step time: 38.30s\n", + "Starting Validation.\n", + "Epochs: 191 Examples seen:18336000 Validation Accuracy: 0.8003 Validation Error: 0.5241 Validation Loss: 0.6840 Total time: 395.11min\n", + "Epoch time: 1.9150 minutes. 500 epochs: 15.9583 hours.\n", + "Epochs: 191. Working time: 6.59 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lucy was playing with her toys. she had a big box of.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "18368000 Examples seen. Accuracy: 0.8237 Error: 0.45193 Loss: 0.51989 Threads: 8 Forward time: 3.26s Backward time: 4.19s Step time: 37.71s\n", + "18400000 Examples seen. Accuracy: 0.8221 Error: 0.49998 Loss: 0.58653 Threads: 8 Forward time: 3.90s Backward time: 4.76s Step time: 37.89s\n", + "18432000 Examples seen. 
Accuracy: 0.8226 Error: 0.47614 Loss: 0.58580 Threads: 8 Forward time: 3.48s Backward time: 4.58s Step time: 37.71s\n", + "Starting Validation.\n", + "Epochs: 192 Examples seen:18432000 Validation Accuracy: 0.8003 Validation Error: 0.5204 Validation Loss: 0.6834 Total time: 397.04min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.870 Min Weight: -0.904 Max Output: 0.702 Min Output: -0.718 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.11s Parent:0\n", + "Layer 2 Max Output: 0.702 Min Output: -0.718 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.576 Min Weight: -0.585 Max Output: 3.160 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.13s 0.07s Parent:2\n", + "Layer 4 Max Output: 3.160 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.160 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.321 Min Weight: -0.335 Max Output: 3.787 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.40s 0.31s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.206 Min Weight: -0.224 Max Output: 2.298 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.58s 0.49s Parent:6\n", + "Layer 8 Max Output: 2.298 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.123 Min Weight: -0.145 Max Output: 1.317 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.73s 0.44s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.223 Min Weight: -0.188 Max Output: 3.303 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.04s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.908 Min Weight: -0.509 Max Output: 13.563 Min Output: -3.919 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.02s Parent:10\n", + "Layer 12 Max Output: 0.920 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.8855 minutes. 500 epochs: 15.7129 hours.\n", + "Epochs: 192. Working time: 6.62 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom and dad. the sky .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "18464000 Examples seen. Accuracy: 0.8302 Error: 0.56024 Loss: 0.71659 Threads: 8 Forward time: 3.16s Backward time: 4.09s Step time: 37.47s\n", + "18496000 Examples seen. Accuracy: 0.8273 Error: 0.46917 Loss: 0.55211 Threads: 8 Forward time: 4.04s Backward time: 5.10s Step time: 37.53s\n", + "18528000 Examples seen. Accuracy: 0.8262 Error: 0.42995 Loss: 0.47852 Threads: 8 Forward time: 3.24s Backward time: 4.37s Step time: 38.90s\n", + "Starting Validation.\n", + "Epochs: 193 Examples seen:18528000 Validation Accuracy: 0.7997 Validation Error: 0.5133 Validation Loss: 0.6780 Total time: 398.99min\n", + "Epoch time: 1.9448 minutes. 500 epochs: 16.2067 hours.\n", + "Epochs: 193. Working time: 6.65 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they saw a big box.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "18560000 Examples seen. Accuracy: 0.8265 Error: 0.49067 Loss: 0.63791 Threads: 8 Forward time: 3.24s Backward time: 4.05s Step time: 37.15s\n", + "18592000 Examples seen. Accuracy: 0.8243 Error: 0.49073 Loss: 0.57090 Threads: 8 Forward time: 3.51s Backward time: 4.68s Step time: 38.11s\n", + "18624000 Examples seen. 
Accuracy: 0.8255 Error: 0.50049 Loss: 0.60728 Threads: 8 Forward time: 3.39s Backward time: 4.37s Step time: 39.27s\n", + "Starting Validation.\n", + "Epochs: 194 Examples seen:18624000 Validation Accuracy: 0.8022 Validation Error: 0.5304 Validation Loss: 0.6794 Total time: 400.94min\n", + "Epoch time: 1.9635 minutes. 500 epochs: 16.3625 hours.\n", + "Epochs: 194. Working time: 6.68 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named sarah was walking in the park. she saw a big pillow .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "18656000 Examples seen. Accuracy: 0.8234 Error: 0.43292 Loss: 0.50918 Threads: 8 Forward time: 3.34s Backward time: 4.32s Step time: 39.32s\n", + "18688000 Examples seen. Accuracy: 0.8247 Error: 0.49547 Loss: 0.60573 Threads: 8 Forward time: 3.42s Backward time: 4.35s Step time: 39.44s\n", + "18720000 Examples seen. Accuracy: 0.8247 Error: 0.48922 Loss: 0.67352 Threads: 8 Forward time: 3.67s Backward time: 4.93s Step time: 39.59s\n", + "Starting Validation.\n", + "Epochs: 195 Examples seen:18720000 Validation Accuracy: 0.8036 Validation Error: 0.5112 Validation Loss: 0.6832 Total time: 402.96min\n", + "Epoch time: 1.9794 minutes. 500 epochs: 16.4950 hours.\n", + "Epochs: 195. Working time: 6.72 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "18752000 Examples seen. Accuracy: 0.8246 Error: 0.45507 Loss: 0.52628 Threads: 8 Forward time: 3.69s Backward time: 4.86s Step time: 39.65s\n", + "18784000 Examples seen. 
Accuracy: 0.8242 Error: 0.45329 Loss: 0.50864 Threads: 8 Forward time: 3.25s Backward time: 4.13s Step time: 39.10s\n", + "18816000 Examples seen. Accuracy: 0.8244 Error: 0.45837 Loss: 0.59518 Threads: 8 Forward time: 3.74s Backward time: 4.96s Step time: 38.63s\n", + "Starting Validation.\n", + "Epochs: 196 Examples seen:18816000 Validation Accuracy: 0.8018 Validation Error: 0.5260 Validation Loss: 0.6784 Total time: 404.97min\n", + "Epoch time: 1.9316 minutes. 500 epochs: 16.0967 hours.\n", + "Epochs: 196. Working time: 6.75 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named sue went to the park with her mom. the sun was so ex.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "18848000 Examples seen. Accuracy: 0.8326 Error: 0.42486 Loss: 0.48271 Threads: 8 Forward time: 3.28s Backward time: 4.08s Step time: 38.44s\n", + "18880000 Examples seen. Accuracy: 0.8334 Error: 0.49245 Loss: 0.55894 Threads: 8 Forward time: 3.85s Backward time: 5.07s Step time: 38.83s\n", + "18912000 Examples seen. Accuracy: 0.8341 Error: 0.52919 Loss: 0.69866 Threads: 8 Forward time: 3.68s Backward time: 4.54s Step time: 38.91s\n", + "Starting Validation.\n", + "Epochs: 197 Examples seen:18912000 Validation Accuracy: 0.8015 Validation Error: 0.5046 Validation Loss: 0.6784 Total time: 406.95min\n", + "Epoch time: 1.9455 minutes. 500 epochs: 16.2125 hours.\n", + "Epochs: 197. Working time: 6.78 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim was very hungry. he was very happy and he wanted .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "18944000 Examples seen. 
Accuracy: 0.8332 Error: 0.47936 Loss: 0.54451 Threads: 8 Forward time: 3.47s Backward time: 4.53s Step time: 37.66s\n", + "18976000 Examples seen. Accuracy: 0.8396 Error: 0.43213 Loss: 0.55284 Threads: 8 Forward time: 3.35s Backward time: 4.41s Step time: 38.02s\n", + "19008000 Examples seen. Accuracy: 0.8351 Error: 0.39532 Loss: 0.45488 Threads: 8 Forward time: 3.53s Backward time: 4.64s Step time: 38.43s\n", + "Starting Validation.\n", + "Epochs: 198 Examples seen:19008000 Validation Accuracy: 0.8036 Validation Error: 0.5168 Validation Loss: 0.6882 Total time: 408.90min\n", + "Epoch time: 1.9214 minutes. 500 epochs: 16.0113 hours.\n", + "Epochs: 198. Working time: 6.81 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they were somethin.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "19040000 Examples seen. Accuracy: 0.8301 Error: 0.44511 Loss: 0.53706 Threads: 8 Forward time: 3.27s Backward time: 4.16s Step time: 38.57s\n", + "19072000 Examples seen. Accuracy: 0.8287 Error: 0.44239 Loss: 0.51708 Threads: 8 Forward time: 3.44s Backward time: 4.45s Step time: 38.24s\n", + "19104000 Examples seen. Accuracy: 0.8275 Error: 0.48969 Loss: 0.59541 Threads: 8 Forward time: 3.82s Backward time: 4.97s Step time: 38.56s\n", + "Starting Validation.\n", + "Epochs: 199 Examples seen:19104000 Validation Accuracy: 0.8026 Validation Error: 0.5210 Validation Loss: 0.6767 Total time: 410.87min\n", + "Epoch time: 1.9279 minutes. 500 epochs: 16.0654 hours.\n", + "Epochs: 199. Working time: 6.85 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little boy named tim went to the park with his mom. he saw a big box o.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "19136000 Examples seen. Accuracy: 0.8272 Error: 0.50576 Loss: 0.65329 Threads: 8 Forward time: 3.68s Backward time: 4.73s Step time: 37.88s\n", + "19168000 Examples seen. Accuracy: 0.8252 Error: 0.46445 Loss: 0.55414 Threads: 8 Forward time: 3.22s Backward time: 4.13s Step time: 38.50s\n", + "19200000 Examples seen. Accuracy: 0.8249 Error: 0.50105 Loss: 0.64246 Threads: 8 Forward time: 4.05s Backward time: 5.30s Step time: 37.50s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 200 Examples seen:19200000 Validation Accuracy: 0.8040 Validation Error: 0.5133 Validation Loss: 0.6614 Total time: 412.85min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.874 Min Weight: -0.905 Max Output: 0.703 Min Output: -0.719 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.10s Parent:0\n", + "Layer 2 Max Output: 0.703 Min Output: -0.719 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.586 Min Weight: -0.584 Max Output: 3.238 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.16s 0.08s Parent:2\n", + "Layer 4 Max Output: 3.238 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.02s Parent:3\n", + "Layer 5 Max Output: 3.238 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.317 Min Weight: -0.340 Max Output: 3.749 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.40s 0.33s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.205 Min Weight: -0.228 Max Output: 2.243 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.76s 0.59s Parent:6\n", + "Layer 8 Max Output: 2.243 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.06s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.123 Min Weight: -0.148 Max Output: 1.264 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 
0.85s 0.54s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.225 Min Weight: -0.187 Max Output: 3.451 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.06s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.915 Min Weight: -0.515 Max Output: 12.184 Min Output: -3.913 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.02s Parent:10\n", + "Layer 12 Max Output: 0.633 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Starting Testing.\n", + "Epochs: 200 Examples seen:19200000 Test Accuracy: 0.8040 Test Error: 0.5133 Test Loss: 0.6614 Total time: 412.89min\n", + "Epoch time: 1.8748 minutes. 500 epochs: 15.6233 hours.\n", + "Epochs: 200. Working time: 6.88 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they wanted to g.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "19232000 Examples seen. Accuracy: 0.8244 Error: 0.44549 Loss: 0.46377 Threads: 8 Forward time: 3.28s Backward time: 4.11s Step time: 36.80s\n", + "19264000 Examples seen. Accuracy: 0.8234 Error: 0.51752 Loss: 0.68564 Threads: 8 Forward time: 3.86s Backward time: 5.04s Step time: 36.97s\n", + "19296000 Examples seen. Accuracy: 0.8264 Error: 0.44505 Loss: 0.53428 Threads: 8 Forward time: 3.50s Backward time: 4.58s Step time: 37.63s\n", + "Starting Validation.\n", + "Epochs: 201 Examples seen:19296000 Validation Accuracy: 0.7992 Validation Error: 0.5112 Validation Loss: 0.6818 Total time: 414.79min\n", + "Epoch time: 1.8813 minutes. 500 epochs: 15.6779 hours.\n", + "Epochs: 201. Working time: 6.91 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went for a walk in the park. she saw a big tree.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "Max prediction pos is: 81\n", + "19328000 Examples seen. Accuracy: 0.8282 Error: 0.40968 Loss: 0.47457 Threads: 8 Forward time: 3.34s Backward time: 4.22s Step time: 37.52s\n", + "19360000 Examples seen. Accuracy: 0.8268 Error: 0.46672 Loss: 0.56613 Threads: 8 Forward time: 3.56s Backward time: 4.41s Step time: 37.25s\n", + "19392000 Examples seen. Accuracy: 0.8295 Error: 0.47275 Loss: 0.58445 Threads: 8 Forward time: 3.51s Backward time: 4.51s Step time: 37.35s\n", + "Starting Validation.\n", + "Epochs: 202 Examples seen:19392000 Validation Accuracy: 0.8020 Validation Error: 0.5205 Validation Loss: 0.6715 Total time: 416.71min\n", + "Epoch time: 1.8677 minutes. 500 epochs: 15.5642 hours.\n", + "Epochs: 202. Working time: 6.95 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lucy was playing in the park. she was three years ol.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "19424000 Examples seen. Accuracy: 0.8278 Error: 0.45825 Loss: 0.55561 Threads: 8 Forward time: 3.37s Backward time: 4.49s Step time: 37.76s\n", + "19456000 Examples seen. Accuracy: 0.8262 Error: 0.44791 Loss: 0.52131 Threads: 8 Forward time: 3.25s Backward time: 4.25s Step time: 36.67s\n", + "19488000 Examples seen. Accuracy: 0.8288 Error: 0.47948 Loss: 0.63790 Threads: 8 Forward time: 3.19s Backward time: 4.02s Step time: 36.33s\n", + "Starting Validation.\n", + "Epochs: 203 Examples seen:19488000 Validation Accuracy: 0.8040 Validation Error: 0.5214 Validation Loss: 0.6656 Total time: 418.60min\n", + "Epoch time: 1.8163 minutes. 500 epochs: 15.1358 hours.\n", + "Epochs: 203. Working time: 6.98 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. 
they wanted to s.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "19520000 Examples seen. Accuracy: 0.8271 Error: 0.49578 Loss: 0.65411 Threads: 8 Forward time: 3.30s Backward time: 4.20s Step time: 36.45s\n", + "19552000 Examples seen. Accuracy: 0.8258 Error: 0.42902 Loss: 0.49056 Threads: 8 Forward time: 3.93s Backward time: 5.13s Step time: 37.65s\n", + "19584000 Examples seen. Accuracy: 0.8320 Error: 0.41145 Loss: 0.43978 Threads: 8 Forward time: 3.37s Backward time: 4.41s Step time: 39.90s\n", + "Starting Validation.\n", + "Epochs: 204 Examples seen:19584000 Validation Accuracy: 0.8003 Validation Error: 0.4981 Validation Loss: 0.6668 Total time: 420.54min\n", + "Epoch time: 1.9948 minutes. 500 epochs: 16.6229 hours.\n", + "Epochs: 204. Working time: 7.01 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named mia went to the park with her mom. they saw a big st.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "19616000 Examples seen. Accuracy: 0.8388 Error: 0.42388 Loss: 0.51903 Threads: 8 Forward time: 3.54s Backward time: 4.58s Step time: 40.14s\n", + "19648000 Examples seen. Accuracy: 0.8334 Error: 0.41764 Loss: 0.50761 Threads: 8 Forward time: 3.82s Backward time: 4.85s Step time: 40.20s\n", + "19680000 Examples seen. Accuracy: 0.8340 Error: 0.42853 Loss: 0.45680 Threads: 8 Forward time: 3.55s Backward time: 4.58s Step time: 39.16s\n", + "Starting Validation.\n", + "Epochs: 205 Examples seen:19680000 Validation Accuracy: 0.8020 Validation Error: 0.5095 Validation Loss: 0.6682 Total time: 422.58min\n", + "Epoch time: 1.9581 minutes. 500 epochs: 16.3179 hours.\n", + "Epochs: 205. Working time: 7.04 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they were so exc.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "19712000 Examples seen. Accuracy: 0.8408 Error: 0.41070 Loss: 0.47873 Threads: 8 Forward time: 4.00s Backward time: 5.09s Step time: 40.30s\n", + "19744000 Examples seen. Accuracy: 0.8356 Error: 0.45674 Loss: 0.47873 Threads: 8 Forward time: 3.35s Backward time: 4.23s Step time: 39.20s\n", + "19776000 Examples seen. Accuracy: 0.8320 Error: 0.46125 Loss: 0.53780 Threads: 8 Forward time: 3.73s Backward time: 4.88s Step time: 39.14s\n", + "Starting Validation.\n", + "Epochs: 206 Examples seen:19776000 Validation Accuracy: 0.8026 Validation Error: 0.5104 Validation Loss: 0.6586 Total time: 424.61min\n", + "Epoch time: 1.9572 minutes. 500 epochs: 16.3100 hours.\n", + "Epochs: 206. Working time: 7.08 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big b.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "19808000 Examples seen. Accuracy: 0.8304 Error: 0.36456 Loss: 0.39924 Threads: 8 Forward time: 3.19s Backward time: 3.97s Step time: 38.92s\n", + "19840000 Examples seen. Accuracy: 0.8370 Error: 0.40625 Loss: 0.45337 Threads: 8 Forward time: 3.46s Backward time: 4.50s Step time: 38.75s\n", + "19872000 Examples seen. Accuracy: 0.8420 Error: 0.42814 Loss: 0.54972 Threads: 8 Forward time: 3.37s Backward time: 4.43s Step time: 38.17s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 207 Examples seen:19872000 Validation Accuracy: 0.8047 Validation Error: 0.4956 Validation Loss: 0.6673 Total time: 426.63min\n", + "Epoch time: 1.9087 minutes. 
500 epochs: 15.9062 hours.\n", + "Epochs: 207. Working time: 7.11 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy went to the park with her mom and dad. she was .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "19904000 Examples seen. Accuracy: 0.8348 Error: 0.51899 Loss: 0.61627 Threads: 8 Forward time: 3.12s Backward time: 4.05s Step time: 38.00s\n", + "19936000 Examples seen. Accuracy: 0.8307 Error: 0.50018 Loss: 0.68055 Threads: 8 Forward time: 3.15s Backward time: 4.06s Step time: 37.07s\n", + "19968000 Examples seen. Accuracy: 0.8396 Error: 0.41346 Loss: 0.47481 Threads: 8 Forward time: 3.27s Backward time: 4.09s Step time: 37.39s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 208 Examples seen:19968000 Validation Accuracy: 0.8059 Validation Error: 0.4961 Validation Loss: 0.6559 Total time: 428.59min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.879 Min Weight: -0.909 Max Output: 0.706 Min Output: -0.721 TNNetPointwiseConv 81,1,32 Times: 0.18s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.706 Min Output: -0.721 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.595 Min Weight: -0.586 Max Output: 3.288 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.16s 0.06s Parent:2\n", + "Layer 4 Max Output: 3.288 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.00s Parent:3\n", + "Layer 5 Max Output: 3.288 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.322 Min Weight: -0.341 Max Output: 3.566 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.35s 0.28s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.207 Min Weight: -0.225 Max Output: 2.359 Min 
Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.56s 0.44s Parent:6\n", + "Layer 8 Max Output: 2.359 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.06s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.121 Min Weight: -0.153 Max Output: 1.550 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.76s 0.42s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.229 Min Weight: -0.187 Max Output: 4.012 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.04s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.923 Min Weight: -0.522 Max Output: 15.812 Min Output: -4.473 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.951 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.8697 minutes. 500 epochs: 15.5812 hours.\n", + "Epochs: 208. Working time: 7.14 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy went to the park. she saw a big streat with her.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "20000000 Examples seen. Accuracy: 0.8346 Error: 0.45848 Loss: 0.54923 Threads: 8 Forward time: 3.15s Backward time: 4.06s Step time: 37.42s\n", + "20032000 Examples seen. Accuracy: 0.8311 Error: 0.44281 Loss: 0.53645 Threads: 8 Forward time: 3.41s Backward time: 4.44s Step time: 36.82s\n", + "20064000 Examples seen. Accuracy: 0.8297 Error: 0.45464 Loss: 0.54811 Threads: 8 Forward time: 3.38s Backward time: 4.36s Step time: 37.19s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 209 Examples seen:20064000 Validation Accuracy: 0.8074 Validation Error: 0.5046 Validation Loss: 0.6593 Total time: 430.53min\n", + "Epoch time: 1.8593 minutes. 500 epochs: 15.4942 hours.\n", + "Epochs: 209. 
Working time: 7.18 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily was playing in the park. she was very excited t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "20096000 Examples seen. Accuracy: 0.8274 Error: 0.55184 Loss: 0.69427 Threads: 8 Forward time: 3.44s Backward time: 4.39s Step time: 37.91s\n", + "20128000 Examples seen. Accuracy: 0.8263 Error: 0.39366 Loss: 0.41483 Threads: 8 Forward time: 3.69s Backward time: 4.88s Step time: 38.54s\n", + "20160000 Examples seen. Accuracy: 0.8272 Error: 0.46300 Loss: 0.57881 Threads: 8 Forward time: 3.14s Backward time: 4.05s Step time: 38.67s\n", + "Starting Validation.\n", + "Epochs: 210 Examples seen:20160000 Validation Accuracy: 0.8051 Validation Error: 0.5136 Validation Loss: 0.6725 Total time: 432.49min\n", + "Starting Testing.\n", + "Epochs: 210 Examples seen:20160000 Test Accuracy: 0.8051 Test Error: 0.5136 Test Loss: 0.6725 Total time: 432.53min\n", + "Epoch time: 1.9337 minutes. 500 epochs: 16.1146 hours.\n", + "Epochs: 210. Working time: 7.21 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big bo.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "20192000 Examples seen. Accuracy: 0.8297 Error: 0.49001 Loss: 0.57781 Threads: 8 Forward time: 3.52s Backward time: 4.68s Step time: 38.15s\n", + "20224000 Examples seen. Accuracy: 0.8330 Error: 0.40421 Loss: 0.44617 Threads: 8 Forward time: 3.83s Backward time: 5.11s Step time: 37.89s\n", + "20256000 Examples seen. 
Accuracy: 0.8355 Error: 0.45363 Loss: 0.55477 Threads: 8 Forward time: 3.35s Backward time: 4.32s Step time: 37.10s\n", + "Starting Validation.\n", + "Epochs: 211 Examples seen:20256000 Validation Accuracy: 0.8015 Validation Error: 0.5134 Validation Loss: 0.6770 Total time: 434.46min\n", + "Epoch time: 1.8551 minutes. 500 epochs: 15.4592 hours.\n", + "Epochs: 211. Working time: 7.24 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily wanted to play with her toys. she had a big sca.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "20288000 Examples seen. Accuracy: 0.8433 Error: 0.40573 Loss: 0.52642 Threads: 8 Forward time: 3.47s Backward time: 4.49s Step time: 38.26s\n", + "20320000 Examples seen. Accuracy: 0.8441 Error: 0.40936 Loss: 0.46022 Threads: 8 Forward time: 3.81s Backward time: 4.76s Step time: 38.81s\n", + "20352000 Examples seen. Accuracy: 0.8337 Error: 0.45987 Loss: 0.59619 Threads: 8 Forward time: 3.59s Backward time: 4.54s Step time: 38.07s\n", + "Starting Validation.\n", + "Epochs: 212 Examples seen:20352000 Validation Accuracy: 0.8063 Validation Error: 0.5211 Validation Loss: 0.6646 Total time: 436.42min\n", + "Epoch time: 1.9036 minutes. 500 epochs: 15.8633 hours.\n", + "Epochs: 212. Working time: 7.27 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily was walking in the park. she was very sad and h.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "20384000 Examples seen. Accuracy: 0.8371 Error: 0.47298 Loss: 0.54798 Threads: 8 Forward time: 3.63s Backward time: 4.86s Step time: 38.19s\n", + "20416000 Examples seen. 
Accuracy: 0.8424 Error: 0.34798 Loss: 0.37393 Threads: 8 Forward time: 3.43s Backward time: 4.49s Step time: 38.72s\n", + "20448000 Examples seen. Accuracy: 0.8337 Error: 0.43786 Loss: 0.48653 Threads: 8 Forward time: 3.51s Backward time: 4.59s Step time: 37.02s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 213 Examples seen:20448000 Validation Accuracy: 0.8084 Validation Error: 0.5130 Validation Loss: 0.6610 Total time: 438.41min\n", + "Epoch time: 1.8511 minutes. 500 epochs: 15.4258 hours.\n", + "Epochs: 213. Working time: 7.31 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big sa.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "20480000 Examples seen. Accuracy: 0.8275 Error: 0.50148 Loss: 0.62272 Threads: 8 Forward time: 3.42s Backward time: 4.32s Step time: 37.67s\n", + "20512000 Examples seen. Accuracy: 0.8273 Error: 0.44816 Loss: 0.51527 Threads: 8 Forward time: 3.35s Backward time: 4.22s Step time: 37.13s\n", + "20544000 Examples seen. Accuracy: 0.8269 Error: 0.44481 Loss: 0.50156 Threads: 8 Forward time: 3.20s Backward time: 4.09s Step time: 37.64s\n", + "Starting Validation.\n", + "Epochs: 214 Examples seen:20544000 Validation Accuracy: 0.8068 Validation Error: 0.5140 Validation Loss: 0.6541 Total time: 440.33min\n", + "Epoch time: 1.8818 minutes. 500 epochs: 15.6813 hours.\n", + "Epochs: 214. Working time: 7.34 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she saw a big su.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "20576000 Examples seen. 
Accuracy: 0.8396 Error: 0.48230 Loss: 0.58656 Threads: 8 Forward time: 3.45s Backward time: 4.62s Step time: 38.10s\n", + "20608000 Examples seen. Accuracy: 0.8371 Error: 0.49161 Loss: 0.53622 Threads: 8 Forward time: 3.73s Backward time: 4.95s Step time: 40.41s\n", + "20640000 Examples seen. Accuracy: 0.8346 Error: 0.47860 Loss: 0.54621 Threads: 8 Forward time: 3.41s Backward time: 4.35s Step time: 38.94s\n", + "Starting Validation.\n", + "Epochs: 215 Examples seen:20640000 Validation Accuracy: 0.8003 Validation Error: 0.5181 Validation Loss: 0.6646 Total time: 442.34min\n", + "Epoch time: 1.9470 minutes. 500 epochs: 16.2250 hours.\n", + "Epochs: 215. Working time: 7.37 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big b.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "20672000 Examples seen. Accuracy: 0.8292 Error: 0.46854 Loss: 0.55197 Threads: 8 Forward time: 3.53s Backward time: 4.54s Step time: 37.61s\n", + "20704000 Examples seen. Accuracy: 0.8307 Error: 0.47732 Loss: 0.49882 Threads: 8 Forward time: 3.33s Backward time: 4.28s Step time: 37.88s\n", + "20736000 Examples seen. 
Accuracy: 0.8306 Error: 0.47130 Loss: 0.59206 Threads: 8 Forward time: 3.46s Backward time: 4.35s Step time: 37.46s\n", + "Starting Validation.\n", + "Epochs: 216 Examples seen:20736000 Validation Accuracy: 0.8063 Validation Error: 0.5136 Validation Loss: 0.6576 Total time: 444.26min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.887 Min Weight: -0.913 Max Output: 0.710 Min Output: -0.723 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.08s Parent:0\n", + "Layer 2 Max Output: 0.710 Min Output: -0.723 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.603 Min Weight: -0.598 Max Output: 3.331 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.13s 0.09s Parent:2\n", + "Layer 4 Max Output: 3.331 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.04s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.331 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.326 Min Weight: -0.346 Max Output: 3.934 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.40s 0.29s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.207 Min Weight: -0.225 Max Output: 2.406 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.64s 0.47s Parent:6\n", + "Layer 8 Max Output: 2.406 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.120 Min Weight: -0.154 Max Output: 1.445 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.74s 0.43s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.231 Min Weight: -0.189 Max Output: 4.034 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.06s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.933 Min Weight: -0.528 Max Output: 15.236 Min Output: -4.249 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.961 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.8731 minutes. 500 epochs: 15.6096 hours.\n", + "Epochs: 216. Working time: 7.40 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big pa.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "20768000 Examples seen. Accuracy: 0.8324 Error: 0.51539 Loss: 0.59485 Threads: 8 Forward time: 3.58s Backward time: 4.15s Step time: 37.96s\n", + "20800000 Examples seen. Accuracy: 0.8319 Error: 0.47998 Loss: 0.59819 Threads: 8 Forward time: 3.30s Backward time: 4.17s Step time: 37.29s\n", + "20832000 Examples seen. Accuracy: 0.8379 Error: 0.38428 Loss: 0.43361 Threads: 8 Forward time: 3.25s Backward time: 4.17s Step time: 37.96s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 217 Examples seen:20832000 Validation Accuracy: 0.8088 Validation Error: 0.4919 Validation Loss: 0.6578 Total time: 446.24min\n", + "Epoch time: 1.8979 minutes. 500 epochs: 15.8154 hours.\n", + "Epochs: 217. Working time: 7.44 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom and dad. she was .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "20864000 Examples seen. Accuracy: 0.8386 Error: 0.41449 Loss: 0.46024 Threads: 8 Forward time: 3.37s Backward time: 4.48s Step time: 37.74s\n", + "20896000 Examples seen. Accuracy: 0.8435 Error: 0.40474 Loss: 0.44842 Threads: 8 Forward time: 3.34s Backward time: 4.34s Step time: 37.41s\n", + "20928000 Examples seen. 
Accuracy: 0.8355 Error: 0.42819 Loss: 0.55254 Threads: 8 Forward time: 3.22s Backward time: 4.24s Step time: 37.70s\n", + "Starting Validation.\n", + "Epochs: 218 Examples seen:20928000 Validation Accuracy: 0.8038 Validation Error: 0.5060 Validation Loss: 0.6620 Total time: 448.16min\n", + "Epoch time: 1.8850 minutes. 500 epochs: 15.7083 hours.\n", + "Epochs: 218. Working time: 7.47 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she saw a big bo.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "20960000 Examples seen. Accuracy: 0.8293 Error: 0.48331 Loss: 0.60304 Threads: 8 Forward time: 3.14s Backward time: 4.01s Step time: 37.05s\n", + "20992000 Examples seen. Accuracy: 0.8321 Error: 0.41923 Loss: 0.54188 Threads: 8 Forward time: 3.50s Backward time: 4.54s Step time: 37.24s\n", + "21024000 Examples seen. Accuracy: 0.8335 Error: 0.45247 Loss: 0.57080 Threads: 8 Forward time: 3.24s Backward time: 4.10s Step time: 37.85s\n", + "Starting Validation.\n", + "Epochs: 219 Examples seen:21024000 Validation Accuracy: 0.8088 Validation Error: 0.5045 Validation Loss: 0.6513 Total time: 450.08min\n", + "Epoch time: 1.8924 minutes. 500 epochs: 15.7700 hours.\n", + "Epochs: 219. Working time: 7.50 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "21056000 Examples seen. Accuracy: 0.8324 Error: 0.46931 Loss: 0.61630 Threads: 8 Forward time: 3.28s Backward time: 4.31s Step time: 37.34s\n", + "21088000 Examples seen. 
Accuracy: 0.8406 Error: 0.38483 Loss: 0.48100 Threads: 8 Forward time: 3.53s Backward time: 4.63s Step time: 37.36s\n", + "21120000 Examples seen. Accuracy: 0.8475 Error: 0.42289 Loss: 0.50080 Threads: 8 Forward time: 3.35s Backward time: 4.36s Step time: 37.72s\n", + "Starting Validation.\n", + "Epochs: 220 Examples seen:21120000 Validation Accuracy: 0.8080 Validation Error: 0.4979 Validation Loss: 0.6588 Total time: 451.99min\n", + "Starting Testing.\n", + "Epochs: 220 Examples seen:21120000 Test Accuracy: 0.8080 Test Error: 0.4979 Test Loss: 0.6588 Total time: 452.03min\n", + "Epoch time: 1.8861 minutes. 500 epochs: 15.7179 hours.\n", + "Epochs: 220. Working time: 7.53 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big bo.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "21152000 Examples seen. Accuracy: 0.8397 Error: 0.48568 Loss: 0.60618 Threads: 8 Forward time: 3.30s Backward time: 4.16s Step time: 38.61s\n", + "21184000 Examples seen. Accuracy: 0.8496 Error: 0.35863 Loss: 0.39717 Threads: 8 Forward time: 3.62s Backward time: 4.85s Step time: 38.36s\n", + "21216000 Examples seen. Accuracy: 0.8431 Error: 0.47239 Loss: 0.65843 Threads: 8 Forward time: 3.89s Backward time: 5.19s Step time: 40.03s\n", + "Starting Validation.\n", + "Epochs: 221 Examples seen:21216000 Validation Accuracy: 0.8049 Validation Error: 0.4949 Validation Loss: 0.6608 Total time: 454.03min\n", + "Epoch time: 2.0016 minutes. 500 epochs: 16.6796 hours.\n", + "Epochs: 221. Working time: 7.57 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big tr.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "21248000 Examples seen. Accuracy: 0.8346 Error: 0.49059 Loss: 0.62692 Threads: 8 Forward time: 3.36s Backward time: 4.35s Step time: 38.76s\n", + "21280000 Examples seen. Accuracy: 0.8315 Error: 0.44150 Loss: 0.53508 Threads: 8 Forward time: 3.42s Backward time: 4.39s Step time: 37.74s\n", + "21312000 Examples seen. Accuracy: 0.8309 Error: 0.42211 Loss: 0.44753 Threads: 8 Forward time: 3.16s Backward time: 4.08s Step time: 37.11s\n", + "Starting Validation.\n", + "Epochs: 222 Examples seen:21312000 Validation Accuracy: 0.8051 Validation Error: 0.5049 Validation Loss: 0.6585 Total time: 455.97min\n", + "Epoch time: 1.8553 minutes. 500 epochs: 15.4608 hours.\n", + "Epochs: 222. Working time: 7.60 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big tr.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "21344000 Examples seen. Accuracy: 0.8288 Error: 0.39845 Loss: 0.43901 Threads: 8 Forward time: 3.53s Backward time: 4.73s Step time: 39.82s\n", + "21376000 Examples seen. Accuracy: 0.8308 Error: 0.50120 Loss: 0.59072 Threads: 8 Forward time: 3.69s Backward time: 4.84s Step time: 39.23s\n", + "21408000 Examples seen. Accuracy: 0.8312 Error: 0.52372 Loss: 0.62846 Threads: 8 Forward time: 3.38s Backward time: 4.39s Step time: 39.05s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 223 Examples seen:21408000 Validation Accuracy: 0.8099 Validation Error: 0.4982 Validation Loss: 0.6503 Total time: 458.03min\n", + "Epoch time: 1.9526 minutes. 500 epochs: 16.2717 hours.\n", + "Epochs: 223. Working time: 7.63 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she saw a big st.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "21440000 Examples seen. Accuracy: 0.8296 Error: 0.39668 Loss: 0.46508 Threads: 8 Forward time: 3.37s Backward time: 4.43s Step time: 40.32s\n", + "21472000 Examples seen. Accuracy: 0.8389 Error: 0.50699 Loss: 0.62551 Threads: 8 Forward time: 3.68s Backward time: 4.88s Step time: 41.24s\n", + "21504000 Examples seen. Accuracy: 0.8339 Error: 0.50146 Loss: 0.55616 Threads: 8 Forward time: 3.57s Backward time: 4.44s Step time: 40.30s\n", + "Starting Validation.\n", + "Epochs: 224 Examples seen:21504000 Validation Accuracy: 0.8063 Validation Error: 0.5045 Validation Loss: 0.6455 Total time: 460.11min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.891 Min Weight: -0.919 Max Output: 0.712 Min Output: -0.726 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.712 Min Output: -0.726 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.607 Min Weight: -0.600 Max Output: 3.370 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.15s 0.07s Parent:2\n", + "Layer 4 Max Output: 3.370 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.04s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.370 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.332 Min Weight: -0.350 Max Output: 3.924 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.40s 0.28s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.215 Min Weight: -0.225 Max Output: 2.623 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.72s 0.50s Parent:6\n", + "Layer 8 Max Output: 2.623 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.06s 0.01s Parent:7\n", + "Layer 9 
Neurons:1024 Max Weight: 0.122 Min Weight: -0.155 Max Output: 1.378 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.85s 0.46s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.234 Min Weight: -0.191 Max Output: 3.586 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.06s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.942 Min Weight: -0.533 Max Output: 14.937 Min Output: -4.241 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.02s Parent:10\n", + "Layer 12 Max Output: 0.961 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 2.0150 minutes. 500 epochs: 16.7913 hours.\n", + "Epochs: 224. Working time: 7.67 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they were very mor.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "21536000 Examples seen. Accuracy: 0.8312 Error: 0.46341 Loss: 0.62606 Threads: 8 Forward time: 3.59s Backward time: 4.68s Step time: 41.67s\n", + "21568000 Examples seen. Accuracy: 0.8300 Error: 0.43509 Loss: 0.54322 Threads: 8 Forward time: 3.40s Backward time: 4.32s Step time: 39.74s\n", + "21600000 Examples seen. Accuracy: 0.8348 Error: 0.40768 Loss: 0.50460 Threads: 8 Forward time: 3.70s Backward time: 4.89s Step time: 40.16s\n", + "Starting Validation.\n", + "Epochs: 225 Examples seen:21600000 Validation Accuracy: 0.8013 Validation Error: 0.4977 Validation Loss: 0.6525 Total time: 462.18min\n", + "Epoch time: 2.0079 minutes. 500 epochs: 16.7325 hours.\n", + "Epochs: 225. Working time: 7.70 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park. she saw a big box with her mo.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "21632000 Examples seen. Accuracy: 0.8456 Error: 0.47268 Loss: 0.57254 Threads: 8 Forward time: 4.26s Backward time: 6.06s Step time: 40.72s\n", + "21664000 Examples seen. Accuracy: 0.8431 Error: 0.35610 Loss: 0.35602 Threads: 8 Forward time: 3.41s Backward time: 4.41s Step time: 41.20s\n", + "21696000 Examples seen. Accuracy: 0.8460 Error: 0.43375 Loss: 0.49474 Threads: 8 Forward time: 3.66s Backward time: 4.50s Step time: 41.02s\n", + "Starting Validation.\n", + "Epochs: 226 Examples seen:21696000 Validation Accuracy: 0.8053 Validation Error: 0.4938 Validation Loss: 0.6556 Total time: 464.28min\n", + "Epoch time: 2.0510 minutes. 500 epochs: 17.0913 hours.\n", + "Epochs: 226. Working time: 7.74 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "21728000 Examples seen. Accuracy: 0.8480 Error: 0.42357 Loss: 0.47110 Threads: 8 Forward time: 3.27s Backward time: 4.16s Step time: 39.68s\n", + "21760000 Examples seen. Accuracy: 0.8436 Error: 0.42925 Loss: 0.54018 Threads: 8 Forward time: 3.53s Backward time: 4.50s Step time: 39.17s\n", + "21792000 Examples seen. Accuracy: 0.8388 Error: 0.48975 Loss: 0.59653 Threads: 8 Forward time: 3.35s Backward time: 4.29s Step time: 39.38s\n", + "Starting Validation.\n", + "Epochs: 227 Examples seen:21792000 Validation Accuracy: 0.8070 Validation Error: 0.4958 Validation Loss: 0.6482 Total time: 466.30min\n", + "Epoch time: 1.9692 minutes. 500 epochs: 16.4096 hours.\n", + "Epochs: 227. Working time: 7.77 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. 
they were playing .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "21824000 Examples seen. Accuracy: 0.8377 Error: 0.46070 Loss: 0.58220 Threads: 8 Forward time: 3.40s Backward time: 4.37s Step time: 38.63s\n", + "21856000 Examples seen. Accuracy: 0.8377 Error: 0.41736 Loss: 0.53189 Threads: 8 Forward time: 3.32s Backward time: 4.31s Step time: 40.14s\n", + "21888000 Examples seen. Accuracy: 0.8332 Error: 0.48210 Loss: 0.60272 Threads: 8 Forward time: 3.40s Backward time: 4.35s Step time: 38.22s\n", + "Starting Validation.\n", + "Epochs: 228 Examples seen:21888000 Validation Accuracy: 0.8097 Validation Error: 0.5085 Validation Loss: 0.6387 Total time: 468.29min\n", + "Epoch time: 1.9111 minutes. 500 epochs: 15.9258 hours.\n", + "Epochs: 228. Working time: 7.80 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "21920000 Examples seen. Accuracy: 0.8343 Error: 0.45134 Loss: 0.55930 Threads: 8 Forward time: 3.40s Backward time: 4.26s Step time: 39.10s\n", + "21952000 Examples seen. Accuracy: 0.8318 Error: 0.46526 Loss: 0.62558 Threads: 8 Forward time: 3.83s Backward time: 5.02s Step time: 39.48s\n", + "21984000 Examples seen. Accuracy: 0.8389 Error: 0.46989 Loss: 0.52206 Threads: 8 Forward time: 3.94s Backward time: 5.30s Step time: 43.23s\n", + "Starting Validation.\n", + "Epochs: 229 Examples seen:21984000 Validation Accuracy: 0.8070 Validation Error: 0.4980 Validation Loss: 0.6488 Total time: 470.37min\n", + "Epoch time: 2.1617 minutes. 500 epochs: 18.0138 hours.\n", + "Epochs: 229. Working time: 7.84 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "22016000 Examples seen. Accuracy: 0.8450 Error: 0.42050 Loss: 0.49283 Threads: 8 Forward time: 3.65s Backward time: 4.86s Step time: 41.26s\n", + "22048000 Examples seen. Accuracy: 0.8436 Error: 0.42585 Loss: 0.51113 Threads: 8 Forward time: 3.51s Backward time: 4.47s Step time: 41.15s\n", + "22080000 Examples seen. Accuracy: 0.8395 Error: 0.37141 Loss: 0.38644 Threads: 8 Forward time: 3.31s Backward time: 4.11s Step time: 39.79s\n", + "Starting Validation.\n", + "Epochs: 230 Examples seen:22080000 Validation Accuracy: 0.8061 Validation Error: 0.4941 Validation Loss: 0.6509 Total time: 472.45min\n", + "Starting Testing.\n", + "Epochs: 230 Examples seen:22080000 Test Accuracy: 0.8061 Test Error: 0.4941 Test Loss: 0.6509 Total time: 472.49min\n", + "Epoch time: 1.9895 minutes. 500 epochs: 16.5792 hours.\n", + "Epochs: 230. Working time: 7.87 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little boy named tim went to the park with his mom. they were so excit.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "22112000 Examples seen. Accuracy: 0.8457 Error: 0.39546 Loss: 0.49046 Threads: 8 Forward time: 3.36s Backward time: 4.41s Step time: 40.21s\n", + "22144000 Examples seen. Accuracy: 0.8424 Error: 0.40985 Loss: 0.44275 Threads: 8 Forward time: 3.72s Backward time: 4.57s Step time: 40.61s\n", + "22176000 Examples seen. 
Accuracy: 0.8354 Error: 0.46489 Loss: 0.57407 Threads: 8 Forward time: 3.65s Backward time: 4.70s Step time: 41.90s\n", + "Starting Validation.\n", + "Epochs: 231 Examples seen:22176000 Validation Accuracy: 0.8057 Validation Error: 0.5144 Validation Loss: 0.6442 Total time: 474.59min\n", + "Epoch time: 2.0948 minutes. 500 epochs: 17.4563 hours.\n", + "Epochs: 231. Working time: 7.91 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she had a big box of her fa.\n", + "one day, a little girl named mia went for a walk in the park. she was three years.\n", + "once upon a time, there was a little girl named lily. she had a big box of her fa.\n", + "Max prediction pos is: 81\n", + "22208000 Examples seen. Accuracy: 0.8331 Error: 0.46362 Loss: 0.58456 Threads: 8 Forward time: 3.70s Backward time: 4.83s Step time: 40.21s\n", + "22240000 Examples seen. Accuracy: 0.8335 Error: 0.49250 Loss: 0.63514 Threads: 8 Forward time: 3.53s Backward time: 4.39s Step time: 40.54s\n", + "22272000 Examples seen. 
Accuracy: 0.8454 Error: 0.44072 Loss: 0.47542 Threads: 8 Forward time: 3.29s Backward time: 4.23s Step time: 39.30s\n", + "Starting Validation.\n", + "Epochs: 232 Examples seen:22272000 Validation Accuracy: 0.8084 Validation Error: 0.4871 Validation Loss: 0.6406 Total time: 476.64min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.892 Min Weight: -0.924 Max Output: 0.712 Min Output: -0.728 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.712 Min Output: -0.728 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.610 Min Weight: -0.607 Max Output: 3.444 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.15s 0.07s Parent:2\n", + "Layer 4 Max Output: 3.444 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.00s Parent:3\n", + "Layer 5 Max Output: 3.444 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.333 Min Weight: -0.353 Max Output: 3.794 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.46s 0.26s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.216 Min Weight: -0.226 Max Output: 2.562 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.59s 0.47s Parent:6\n", + "Layer 8 Max Output: 2.562 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.121 Min Weight: -0.159 Max Output: 1.435 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.80s 0.43s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.237 Min Weight: -0.193 Max Output: 4.209 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.957 Min Weight: -0.540 Max Output: 17.259 Min Output: -4.728 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.988 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.9649 minutes. 500 epochs: 16.3746 hours.\n", + "Epochs: 232. Working time: 7.94 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big bo.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "22304000 Examples seen. Accuracy: 0.8420 Error: 0.41559 Loss: 0.51705 Threads: 8 Forward time: 4.06s Backward time: 5.50s Step time: 39.81s\n", + "22336000 Examples seen. Accuracy: 0.8385 Error: 0.45321 Loss: 0.56640 Threads: 8 Forward time: 3.45s Backward time: 4.35s Step time: 38.47s\n", + "22368000 Examples seen. Accuracy: 0.8334 Error: 0.44868 Loss: 0.54457 Threads: 8 Forward time: 3.71s Backward time: 4.62s Step time: 39.28s\n", + "Starting Validation.\n", + "Epochs: 233 Examples seen:22368000 Validation Accuracy: 0.8095 Validation Error: 0.4950 Validation Loss: 0.6440 Total time: 478.64min\n", + "Epoch time: 1.9640 minutes. 500 epochs: 16.3667 hours.\n", + "Epochs: 233. Working time: 7.98 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lucy was playing in the park. she was so excited bec.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "22400000 Examples seen. Accuracy: 0.8427 Error: 0.48473 Loss: 0.59734 Threads: 8 Forward time: 3.88s Backward time: 5.15s Step time: 41.08s\n", + "22432000 Examples seen. Accuracy: 0.8363 Error: 0.43596 Loss: 0.59971 Threads: 8 Forward time: 3.56s Backward time: 4.48s Step time: 40.02s\n", + "22464000 Examples seen. 
Accuracy: 0.8475 Error: 0.46152 Loss: 0.58494 Threads: 8 Forward time: 3.49s Backward time: 4.73s Step time: 39.57s\n", + "Starting Validation.\n", + "Epochs: 234 Examples seen:22464000 Validation Accuracy: 0.8043 Validation Error: 0.4866 Validation Loss: 0.6538 Total time: 480.70min\n", + "Epoch time: 1.9783 minutes. 500 epochs: 16.4858 hours.\n", + "Epochs: 234. Working time: 8.01 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "22496000 Examples seen. Accuracy: 0.8510 Error: 0.37444 Loss: 0.38331 Threads: 8 Forward time: 3.23s Backward time: 4.09s Step time: 38.73s\n", + "22528000 Examples seen. Accuracy: 0.8381 Error: 0.36670 Loss: 0.39057 Threads: 8 Forward time: 3.76s Backward time: 5.00s Step time: 38.22s\n", + "22560000 Examples seen. Accuracy: 0.8322 Error: 0.52910 Loss: 0.69482 Threads: 8 Forward time: 3.17s Backward time: 3.87s Step time: 39.88s\n", + "Starting Validation.\n", + "Epochs: 235 Examples seen:22560000 Validation Accuracy: 0.8045 Validation Error: 0.5032 Validation Loss: 0.6441 Total time: 482.69min\n", + "Epoch time: 1.9938 minutes. 500 epochs: 16.6150 hours.\n", + "Epochs: 235. Working time: 8.04 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little boy named tim was very hungry. he wanted to go on an adventure..\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "22592000 Examples seen. Accuracy: 0.8453 Error: 0.44058 Loss: 0.57613 Threads: 8 Forward time: 3.70s Backward time: 5.02s Step time: 38.25s\n", + "22624000 Examples seen. 
Accuracy: 0.8407 Error: 0.46950 Loss: 0.56988 Threads: 8 Forward time: 3.39s Backward time: 4.37s Step time: 38.81s\n", + "22656000 Examples seen. Accuracy: 0.8332 Error: 0.45697 Loss: 0.52471 Threads: 8 Forward time: 3.55s Backward time: 4.50s Step time: 39.26s\n", + "Starting Validation.\n", + "Epochs: 236 Examples seen:22656000 Validation Accuracy: 0.8093 Validation Error: 0.4987 Validation Loss: 0.6433 Total time: 484.67min\n", + "Epoch time: 1.9631 minutes. 500 epochs: 16.3592 hours.\n", + "Epochs: 236. Working time: 8.08 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they were best fri.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "22688000 Examples seen. Accuracy: 0.8336 Error: 0.46814 Loss: 0.55409 Threads: 8 Forward time: 3.82s Backward time: 4.78s Step time: 39.28s\n", + "22720000 Examples seen. Accuracy: 0.8320 Error: 0.45088 Loss: 0.61803 Threads: 8 Forward time: 3.52s Backward time: 4.49s Step time: 38.39s\n", + "22752000 Examples seen. Accuracy: 0.8320 Error: 0.46688 Loss: 0.54501 Threads: 8 Forward time: 3.32s Backward time: 4.27s Step time: 38.93s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 237 Examples seen:22752000 Validation Accuracy: 0.8128 Validation Error: 0.5004 Validation Loss: 0.6374 Total time: 486.71min\n", + "Epoch time: 1.9465 minutes. 500 epochs: 16.2212 hours.\n", + "Epochs: 237. Working time: 8.11 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big st.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "22784000 Examples seen. 
Accuracy: 0.8328 Error: 0.50102 Loss: 0.65818 Threads: 8 Forward time: 3.75s Backward time: 4.85s Step time: 38.48s\n", + "22816000 Examples seen. Accuracy: 0.8320 Error: 0.46575 Loss: 0.61655 Threads: 8 Forward time: 3.38s Backward time: 4.39s Step time: 38.90s\n", + "22848000 Examples seen. Accuracy: 0.8440 Error: 0.39221 Loss: 0.46665 Threads: 8 Forward time: 3.61s Backward time: 4.63s Step time: 40.74s\n", + "Starting Validation.\n", + "Epochs: 238 Examples seen:22848000 Validation Accuracy: 0.8088 Validation Error: 0.4979 Validation Loss: 0.6414 Total time: 488.72min\n", + "Epoch time: 2.0372 minutes. 500 epochs: 16.9767 hours.\n", + "Epochs: 238. Working time: 8.15 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl was walking in the park. she saw a big shiny and she was v.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "22880000 Examples seen. Accuracy: 0.8377 Error: 0.40757 Loss: 0.54019 Threads: 8 Forward time: 3.59s Backward time: 4.79s Step time: 40.66s\n", + "22912000 Examples seen. Accuracy: 0.8354 Error: 0.41007 Loss: 0.46969 Threads: 8 Forward time: 3.66s Backward time: 4.62s Step time: 40.64s\n", + "22944000 Examples seen. Accuracy: 0.8327 Error: 0.44055 Loss: 0.54880 Threads: 8 Forward time: 3.56s Backward time: 4.73s Step time: 40.81s\n", + "Starting Validation.\n", + "Epochs: 239 Examples seen:22944000 Validation Accuracy: 0.8074 Validation Error: 0.5064 Validation Loss: 0.6477 Total time: 490.81min\n", + "Epoch time: 2.0407 minutes. 500 epochs: 17.0054 hours.\n", + "Epochs: 239. Working time: 8.18 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she saw a big st.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "Max prediction pos is: 81\n", + "22976000 Examples seen. Accuracy: 0.8306 Error: 0.51365 Loss: 0.58425 Threads: 8 Forward time: 3.37s Backward time: 4.21s Step time: 39.52s\n", + "23008000 Examples seen. Accuracy: 0.8316 Error: 0.44005 Loss: 0.56717 Threads: 8 Forward time: 3.76s Backward time: 4.75s Step time: 36.20s\n", + "23040000 Examples seen. Accuracy: 0.8314 Error: 0.47068 Loss: 0.60704 Threads: 8 Forward time: 3.30s Backward time: 4.27s Step time: 37.24s\n", + "Starting Validation.\n", + "Epochs: 240 Examples seen:23040000 Validation Accuracy: 0.8068 Validation Error: 0.5036 Validation Loss: 0.6444 Total time: 492.74min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.895 Min Weight: -0.930 Max Output: 0.714 Min Output: -0.731 TNNetPointwiseConv 81,1,32 Times: 0.23s 0.07s Parent:0\n", + "Layer 2 Max Output: 0.714 Min Output: -0.731 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.616 Min Weight: -0.610 Max Output: 3.442 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.16s 0.08s Parent:2\n", + "Layer 4 Max Output: 3.442 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.442 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.333 Min Weight: -0.352 Max Output: 4.101 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.44s 0.28s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.216 Min Weight: -0.228 Max Output: 2.520 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.62s 0.47s Parent:6\n", + "Layer 8 Max Output: 2.520 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.122 Min Weight: -0.160 Max Output: 1.407 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.79s 0.42s Parent:8\n", + "Layer 10 Neurons:128 Max 
Weight: 0.239 Min Weight: -0.193 Max Output: 3.387 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.04s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.957 Min Weight: -0.542 Max Output: 15.687 Min Output: -4.296 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.986 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Starting Testing.\n", + "Epochs: 240 Examples seen:23040000 Test Accuracy: 0.8068 Test Error: 0.5036 Test Loss: 0.6444 Total time: 492.78min\n", + "Epoch time: 1.8620 minutes. 500 epochs: 15.5167 hours.\n", + "Epochs: 240. Working time: 8.21 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "23072000 Examples seen. Accuracy: 0.8415 Error: 0.41160 Loss: 0.45777 Threads: 8 Forward time: 3.17s Backward time: 4.01s Step time: 37.72s\n", + "23104000 Examples seen. Accuracy: 0.8370 Error: 0.45480 Loss: 0.56846 Threads: 8 Forward time: 3.45s Backward time: 4.44s Step time: 37.32s\n", + "23136000 Examples seen. Accuracy: 0.8347 Error: 0.43004 Loss: 0.43232 Threads: 8 Forward time: 3.51s Backward time: 4.34s Step time: 37.39s\n", + "Starting Validation.\n", + "Epochs: 241 Examples seen:23136000 Validation Accuracy: 0.8063 Validation Error: 0.5067 Validation Loss: 0.6493 Total time: 494.70min\n", + "Epoch time: 1.8693 minutes. 500 epochs: 15.5779 hours.\n", + "Epochs: 241. Working time: 8.24 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named lily was playing in her backyard. she saw a big brow.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "23168000 Examples seen. Accuracy: 0.8346 Error: 0.41286 Loss: 0.49664 Threads: 8 Forward time: 3.45s Backward time: 4.32s Step time: 36.99s\n", + "23200000 Examples seen. Accuracy: 0.8333 Error: 0.46259 Loss: 0.53008 Threads: 8 Forward time: 3.42s Backward time: 4.26s Step time: 36.55s\n", + "23232000 Examples seen. Accuracy: 0.8323 Error: 0.45815 Loss: 0.52147 Threads: 8 Forward time: 3.50s Backward time: 4.64s Step time: 36.32s\n", + "Starting Validation.\n", + "Epochs: 242 Examples seen:23232000 Validation Accuracy: 0.8082 Validation Error: 0.4985 Validation Loss: 0.6358 Total time: 496.58min\n", + "Epoch time: 1.8162 minutes. 500 epochs: 15.1354 hours.\n", + "Epochs: 242. Working time: 8.28 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mommy. they saw a big.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "23264000 Examples seen. Accuracy: 0.8298 Error: 0.43971 Loss: 0.53792 Threads: 8 Forward time: 3.31s Backward time: 4.17s Step time: 37.75s\n", + "23296000 Examples seen. Accuracy: 0.8318 Error: 0.43147 Loss: 0.50871 Threads: 8 Forward time: 3.60s Backward time: 4.59s Step time: 36.92s\n", + "23328000 Examples seen. Accuracy: 0.8405 Error: 0.37094 Loss: 0.40153 Threads: 8 Forward time: 3.23s Backward time: 3.98s Step time: 37.54s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 243 Examples seen:23328000 Validation Accuracy: 0.8130 Validation Error: 0.4868 Validation Loss: 0.6421 Total time: 498.53min\n", + "Epoch time: 1.8769 minutes. 500 epochs: 15.6404 hours.\n", + "Epochs: 243. Working time: 8.31 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside a.\n", + "one day, a little boy named tim went to the park with his mom. they had a big box.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "23360000 Examples seen. Accuracy: 0.8424 Error: 0.44895 Loss: 0.50016 Threads: 8 Forward time: 3.50s Backward time: 4.22s Step time: 38.05s\n", + "23392000 Examples seen. Accuracy: 0.8372 Error: 0.43117 Loss: 0.51689 Threads: 8 Forward time: 3.46s Backward time: 4.47s Step time: 38.41s\n", + "23424000 Examples seen. Accuracy: 0.8353 Error: 0.43499 Loss: 0.52901 Threads: 8 Forward time: 3.08s Backward time: 3.76s Step time: 37.99s\n", + "Starting Validation.\n", + "Epochs: 244 Examples seen:23424000 Validation Accuracy: 0.8116 Validation Error: 0.4921 Validation Loss: 0.6434 Total time: 500.48min\n", + "Epoch time: 1.8993 minutes. 500 epochs: 15.8271 hours.\n", + "Epochs: 244. Working time: 8.34 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom and dad. she was .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "23456000 Examples seen. Accuracy: 0.8357 Error: 0.51054 Loss: 0.66712 Threads: 8 Forward time: 3.77s Backward time: 4.55s Step time: 37.49s\n", + "23488000 Examples seen. Accuracy: 0.8427 Error: 0.38052 Loss: 0.43897 Threads: 8 Forward time: 3.37s Backward time: 4.30s Step time: 36.99s\n", + "23520000 Examples seen. Accuracy: 0.8467 Error: 0.37787 Loss: 0.36990 Threads: 8 Forward time: 3.65s Backward time: 4.88s Step time: 37.33s\n", + "Starting Validation.\n", + "Epochs: 245 Examples seen:23520000 Validation Accuracy: 0.8095 Validation Error: 0.4901 Validation Loss: 0.6508 Total time: 502.39min\n", + "Epoch time: 1.8667 minutes. 500 epochs: 15.5558 hours.\n", + "Epochs: 245. 
Working time: 8.37 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named amy was walking in the park. she was very excited to.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "23552000 Examples seen. Accuracy: 0.8540 Error: 0.45121 Loss: 0.58464 Threads: 8 Forward time: 3.20s Backward time: 4.09s Step time: 36.72s\n", + "23584000 Examples seen. Accuracy: 0.8526 Error: 0.38541 Loss: 0.39475 Threads: 8 Forward time: 3.86s Backward time: 4.88s Step time: 38.10s\n", + "23616000 Examples seen. Accuracy: 0.8436 Error: 0.42944 Loss: 0.52154 Threads: 8 Forward time: 3.54s Backward time: 4.40s Step time: 38.24s\n", + "Starting Validation.\n", + "Epochs: 246 Examples seen:23616000 Validation Accuracy: 0.8080 Validation Error: 0.4921 Validation Loss: 0.6385 Total time: 504.32min\n", + "Epoch time: 1.9119 minutes. 500 epochs: 15.9321 hours.\n", + "Epochs: 246. Working time: 8.41 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they had a big box.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "23648000 Examples seen. Accuracy: 0.8397 Error: 0.40453 Loss: 0.50442 Threads: 8 Forward time: 3.82s Backward time: 4.81s Step time: 37.94s\n", + "23680000 Examples seen. Accuracy: 0.8319 Error: 0.46045 Loss: 0.51807 Threads: 8 Forward time: 3.98s Backward time: 5.04s Step time: 38.78s\n", + "23712000 Examples seen. 
Accuracy: 0.8309 Error: 0.45447 Loss: 0.58577 Threads: 8 Forward time: 3.51s Backward time: 4.35s Step time: 37.43s\n", + "Starting Validation.\n", + "Epochs: 247 Examples seen:23712000 Validation Accuracy: 0.8120 Validation Error: 0.4986 Validation Loss: 0.6370 Total time: 506.27min\n", + "Epoch time: 1.8717 minutes. 500 epochs: 15.5971 hours.\n", + "Epochs: 247. Working time: 8.44 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily was playing in the park. she saw a big box of t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "23744000 Examples seen. Accuracy: 0.8324 Error: 0.42722 Loss: 0.50096 Threads: 8 Forward time: 3.23s Backward time: 4.01s Step time: 37.37s\n", + "23776000 Examples seen. Accuracy: 0.8460 Error: 0.40788 Loss: 0.45619 Threads: 8 Forward time: 3.53s Backward time: 4.53s Step time: 36.71s\n", + "23808000 Examples seen. 
Accuracy: 0.8487 Error: 0.41185 Loss: 0.46161 Threads: 8 Forward time: 3.55s Backward time: 4.58s Step time: 36.98s\n", + "Starting Validation.\n", + "Epochs: 248 Examples seen:23808000 Validation Accuracy: 0.8091 Validation Error: 0.4869 Validation Loss: 0.6389 Total time: 508.16min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.899 Min Weight: -0.932 Max Output: 0.716 Min Output: -0.731 TNNetPointwiseConv 81,1,32 Times: 0.19s 0.10s Parent:0\n", + "Layer 2 Max Output: 0.716 Min Output: -0.731 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.625 Min Weight: -0.617 Max Output: 3.512 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.13s 0.07s Parent:2\n", + "Layer 4 Max Output: 3.512 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.512 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.335 Min Weight: -0.356 Max Output: 3.893 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.36s 0.27s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.220 Min Weight: -0.232 Max Output: 2.757 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.65s 0.53s Parent:6\n", + "Layer 8 Max Output: 2.757 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.125 Min Weight: -0.166 Max Output: 1.624 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.75s 0.45s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.243 Min Weight: -0.196 Max Output: 4.319 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.04s 0.06s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.962 Min Weight: -0.547 Max Output: 17.144 Min Output: -4.809 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.969 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.8490 minutes. 500 epochs: 15.4079 hours.\n", + "Epochs: 248. Working time: 8.47 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. the sun was shin.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "23840000 Examples seen. Accuracy: 0.8381 Error: 0.45011 Loss: 0.52692 Threads: 8 Forward time: 3.36s Backward time: 4.22s Step time: 36.30s\n", + "23872000 Examples seen. Accuracy: 0.8444 Error: 0.50996 Loss: 0.64501 Threads: 8 Forward time: 3.62s Backward time: 4.79s Step time: 36.89s\n", + "23904000 Examples seen. Accuracy: 0.8391 Error: 0.41291 Loss: 0.48734 Threads: 8 Forward time: 3.42s Backward time: 4.33s Step time: 36.37s\n", + "Starting Validation.\n", + "Epochs: 249 Examples seen:23904000 Validation Accuracy: 0.8095 Validation Error: 0.4959 Validation Loss: 0.6311 Total time: 510.04min\n", + "Epoch time: 1.8183 minutes. 500 epochs: 15.1529 hours.\n", + "Epochs: 249. Working time: 8.50 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "23936000 Examples seen. Accuracy: 0.8398 Error: 0.42699 Loss: 0.45777 Threads: 8 Forward time: 3.38s Backward time: 4.37s Step time: 37.88s\n", + "23968000 Examples seen. Accuracy: 0.8471 Error: 0.40172 Loss: 0.46578 Threads: 8 Forward time: 3.33s Backward time: 4.24s Step time: 37.61s\n", + "24000000 Examples seen. 
Accuracy: 0.8472 Error: 0.38927 Loss: 0.42772 Threads: 8 Forward time: 3.42s Backward time: 4.34s Step time: 37.21s\n", + "Starting Validation.\n", + "Epochs: 250 Examples seen:24000000 Validation Accuracy: 0.8122 Validation Error: 0.4839 Validation Loss: 0.6456 Total time: 511.96min\n", + "Starting Testing.\n", + "Epochs: 250 Examples seen:24000000 Test Accuracy: 0.8122 Test Error: 0.4839 Test Loss: 0.6456 Total time: 512.00min\n", + "Epoch time: 1.8604 minutes. 500 epochs: 15.5033 hours.\n", + "Epochs: 250. Working time: 8.53 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "24032000 Examples seen. Accuracy: 0.8553 Error: 0.38866 Loss: 0.47409 Threads: 8 Forward time: 3.62s Backward time: 4.74s Step time: 37.74s\n", + "24064000 Examples seen. Accuracy: 0.8501 Error: 0.45186 Loss: 0.51070 Threads: 8 Forward time: 3.15s Backward time: 3.81s Step time: 37.38s\n", + "24096000 Examples seen. Accuracy: 0.8467 Error: 0.38841 Loss: 0.50043 Threads: 8 Forward time: 3.49s Backward time: 4.41s Step time: 37.61s\n", + "Starting Validation.\n", + "Epochs: 251 Examples seen:24096000 Validation Accuracy: 0.8076 Validation Error: 0.4940 Validation Loss: 0.6386 Total time: 513.92min\n", + "Epoch time: 1.8806 minutes. 500 epochs: 15.6721 hours.\n", + "Epochs: 251. Working time: 8.57 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside w.\n", + "one day, a little boy named tim went to the park with his mom. they were best fri.\n", + "once upon a time, there was a little girl named lily. she loved to play outside w.\n", + "Max prediction pos is: 81\n", + "24128000 Examples seen. 
Accuracy: 0.8373 Error: 0.36201 Loss: 0.44045 Threads: 8 Forward time: 3.62s Backward time: 4.51s Step time: 38.38s\n", + "24160000 Examples seen. Accuracy: 0.8335 Error: 0.44164 Loss: 0.52388 Threads: 8 Forward time: 3.54s Backward time: 4.64s Step time: 37.74s\n", + "24192000 Examples seen. Accuracy: 0.8323 Error: 0.44377 Loss: 0.54484 Threads: 8 Forward time: 3.59s Backward time: 4.63s Step time: 36.68s\n", + "Starting Validation.\n", + "Epochs: 252 Examples seen:24192000 Validation Accuracy: 0.8074 Validation Error: 0.4999 Validation Loss: 0.6403 Total time: 515.85min\n", + "Epoch time: 1.8341 minutes. 500 epochs: 15.2838 hours.\n", + "Epochs: 252. Working time: 8.60 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named sue went to the park with her mom. she saw a big str.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "24224000 Examples seen. Accuracy: 0.8490 Error: 0.38748 Loss: 0.44258 Threads: 8 Forward time: 3.54s Backward time: 4.71s Step time: 38.69s\n", + "24256000 Examples seen. Accuracy: 0.8427 Error: 0.42943 Loss: 0.53912 Threads: 8 Forward time: 3.49s Backward time: 4.50s Step time: 38.59s\n", + "24288000 Examples seen. Accuracy: 0.8406 Error: 0.40254 Loss: 0.45358 Threads: 8 Forward time: 3.45s Backward time: 4.35s Step time: 38.19s\n", + "Starting Validation.\n", + "Epochs: 253 Examples seen:24288000 Validation Accuracy: 0.8063 Validation Error: 0.4953 Validation Loss: 0.6396 Total time: 517.82min\n", + "Epoch time: 1.9096 minutes. 500 epochs: 15.9137 hours.\n", + "Epochs: 253. Working time: 8.63 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lucy was walking in the park. she was very curious a.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "Max prediction pos is: 81\n", + "24320000 Examples seen. Accuracy: 0.8341 Error: 0.50367 Loss: 0.64307 Threads: 8 Forward time: 3.69s Backward time: 4.79s Step time: 38.16s\n", + "24352000 Examples seen. Accuracy: 0.8338 Error: 0.43686 Loss: 0.53697 Threads: 8 Forward time: 3.48s Backward time: 4.43s Step time: 38.06s\n", + "24384000 Examples seen. Accuracy: 0.8339 Error: 0.37032 Loss: 0.39313 Threads: 8 Forward time: 3.36s Backward time: 4.31s Step time: 37.96s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 254 Examples seen:24384000 Validation Accuracy: 0.8141 Validation Error: 0.4914 Validation Loss: 0.6212 Total time: 519.81min\n", + "Epoch time: 1.8982 minutes. 500 epochs: 15.8187 hours.\n", + "Epochs: 254. Working time: 8.66 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy was playing with her toys. she was three years .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "24416000 Examples seen. Accuracy: 0.8506 Error: 0.39383 Loss: 0.48929 Threads: 8 Forward time: 3.33s Backward time: 4.15s Step time: 38.74s\n", + "24448000 Examples seen. Accuracy: 0.8411 Error: 0.45129 Loss: 0.61135 Threads: 8 Forward time: 3.59s Backward time: 4.52s Step time: 38.12s\n", + "24480000 Examples seen. Accuracy: 0.8425 Error: 0.34489 Loss: 0.37769 Threads: 8 Forward time: 3.96s Backward time: 5.16s Step time: 38.60s\n", + "Starting Validation.\n", + "Epochs: 255 Examples seen:24480000 Validation Accuracy: 0.8091 Validation Error: 0.4791 Validation Loss: 0.6422 Total time: 521.78min\n", + "Epoch time: 1.9299 minutes. 500 epochs: 16.0825 hours.\n", + "Epochs: 255. Working time: 8.70 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "one day, a little girl named lily was playing in her backyard. she was walking ar.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "24512000 Examples seen. Accuracy: 0.8494 Error: 0.46203 Loss: 0.54966 Threads: 8 Forward time: 3.36s Backward time: 4.11s Step time: 39.06s\n", + "24544000 Examples seen. Accuracy: 0.8406 Error: 0.48438 Loss: 0.58707 Threads: 8 Forward time: 3.41s Backward time: 4.38s Step time: 37.74s\n", + "24576000 Examples seen. Accuracy: 0.8369 Error: 0.44365 Loss: 0.53191 Threads: 8 Forward time: 3.74s Backward time: 4.44s Step time: 38.43s\n", + "Starting Validation.\n", + "Epochs: 256 Examples seen:24576000 Validation Accuracy: 0.8124 Validation Error: 0.4958 Validation Loss: 0.6240 Total time: 523.75min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.911 Min Weight: -0.937 Max Output: 0.721 Min Output: -0.734 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.10s Parent:0\n", + "Layer 2 Max Output: 0.721 Min Output: -0.734 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.632 Min Weight: -0.623 Max Output: 3.565 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.17s 0.08s Parent:2\n", + "Layer 4 Max Output: 3.565 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.565 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.339 Min Weight: -0.358 Max Output: 4.028 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.42s 0.28s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.221 Min Weight: -0.233 Max Output: 2.590 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.66s 0.47s Parent:6\n", + "Layer 8 Max Output: 2.590 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.08s 0.01s Parent:7\n", + "Layer 9 
Neurons:1024 Max Weight: 0.127 Min Weight: -0.166 Max Output: 1.479 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.74s 0.46s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.243 Min Weight: -0.199 Max Output: 3.556 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.06s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.968 Min Weight: -0.549 Max Output: 14.529 Min Output: -4.193 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.937 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.9214 minutes. 500 epochs: 16.0113 hours.\n", + "Epochs: 256. Working time: 8.73 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim was walking in the park. he was very happy and he.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "24608000 Examples seen. Accuracy: 0.8304 Error: 0.41468 Loss: 0.47865 Threads: 8 Forward time: 3.14s Backward time: 3.99s Step time: 39.20s\n", + "24640000 Examples seen. Accuracy: 0.8478 Error: 0.38567 Loss: 0.47529 Threads: 8 Forward time: 3.32s Backward time: 4.12s Step time: 37.71s\n", + "24672000 Examples seen. Accuracy: 0.8430 Error: 0.46108 Loss: 0.52624 Threads: 8 Forward time: 3.66s Backward time: 4.35s Step time: 38.07s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 257 Examples seen:24672000 Validation Accuracy: 0.8143 Validation Error: 0.4920 Validation Loss: 0.6306 Total time: 525.75min\n", + "Epoch time: 1.9034 minutes. 500 epochs: 15.8617 hours.\n", + "Epochs: 257. Working time: 8.76 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. 
they were both was.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "24704000 Examples seen. Accuracy: 0.8440 Error: 0.37957 Loss: 0.46987 Threads: 8 Forward time: 3.27s Backward time: 4.13s Step time: 37.04s\n", + "24736000 Examples seen. Accuracy: 0.8388 Error: 0.43066 Loss: 0.48806 Threads: 8 Forward time: 3.60s Backward time: 4.51s Step time: 37.34s\n", + "24768000 Examples seen. Accuracy: 0.8335 Error: 0.49560 Loss: 0.67286 Threads: 8 Forward time: 3.41s Backward time: 4.48s Step time: 37.22s\n", + "Starting Validation.\n", + "Epochs: 258 Examples seen:24768000 Validation Accuracy: 0.8084 Validation Error: 0.4941 Validation Loss: 0.6345 Total time: 527.65min\n", + "Epoch time: 1.8611 minutes. 500 epochs: 15.5092 hours.\n", + "Epochs: 258. Working time: 8.79 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they wanted to s.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "24800000 Examples seen. Accuracy: 0.8351 Error: 0.45051 Loss: 0.54609 Threads: 8 Forward time: 3.48s Backward time: 4.33s Step time: 36.74s\n", + "24832000 Examples seen. Accuracy: 0.8354 Error: 0.46132 Loss: 0.56232 Threads: 8 Forward time: 3.08s Backward time: 3.75s Step time: 35.99s\n", + "24864000 Examples seen. Accuracy: 0.8361 Error: 0.40258 Loss: 0.50555 Threads: 8 Forward time: 3.26s Backward time: 4.12s Step time: 35.80s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 259 Examples seen:24864000 Validation Accuracy: 0.8145 Validation Error: 0.4891 Validation Loss: 0.6402 Total time: 529.55min\n", + "Epoch time: 1.7902 minutes. 500 epochs: 14.9187 hours.\n", + "Epochs: 259. 
Working time: 8.83 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they were playing .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "24896000 Examples seen. Accuracy: 0.8375 Error: 0.44016 Loss: 0.50149 Threads: 8 Forward time: 3.20s Backward time: 3.94s Step time: 35.47s\n", + "24928000 Examples seen. Accuracy: 0.8343 Error: 0.53779 Loss: 0.69242 Threads: 8 Forward time: 3.24s Backward time: 4.13s Step time: 36.04s\n", + "24960000 Examples seen. Accuracy: 0.8320 Error: 0.48236 Loss: 0.55851 Threads: 8 Forward time: 3.15s Backward time: 3.77s Step time: 35.73s\n", + "Starting Validation.\n", + "Epochs: 260 Examples seen:24960000 Validation Accuracy: 0.8093 Validation Error: 0.4886 Validation Loss: 0.6156 Total time: 531.38min\n", + "Starting Testing.\n", + "Epochs: 260 Examples seen:24960000 Test Accuracy: 0.8093 Test Error: 0.4886 Test Loss: 0.6156 Total time: 531.41min\n", + "Epoch time: 1.7863 minutes. 500 epochs: 14.8858 hours.\n", + "Epochs: 260. Working time: 8.86 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "24992000 Examples seen. Accuracy: 0.8329 Error: 0.45838 Loss: 0.49392 Threads: 8 Forward time: 3.19s Backward time: 3.97s Step time: 35.78s\n", + "25024000 Examples seen. Accuracy: 0.8337 Error: 0.46731 Loss: 0.59654 Threads: 8 Forward time: 3.08s Backward time: 3.75s Step time: 36.43s\n", + "25056000 Examples seen. 
Accuracy: 0.8362 Error: 0.44800 Loss: 0.53809 Threads: 8 Forward time: 3.48s Backward time: 4.34s Step time: 35.57s\n", + "Starting Validation.\n", + "Epochs: 261 Examples seen:25056000 Validation Accuracy: 0.8109 Validation Error: 0.4984 Validation Loss: 0.6282 Total time: 533.25min\n", + "Epoch time: 1.7786 minutes. 500 epochs: 14.8213 hours.\n", + "Epochs: 261. Working time: 8.89 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "25088000 Examples seen. Accuracy: 0.8431 Error: 0.39243 Loss: 0.47211 Threads: 8 Forward time: 3.21s Backward time: 4.08s Step time: 35.69s\n", + "25120000 Examples seen. Accuracy: 0.8517 Error: 0.39813 Loss: 0.47192 Threads: 8 Forward time: 3.37s Backward time: 4.26s Step time: 35.92s\n", + "25152000 Examples seen. Accuracy: 0.8426 Error: 0.49023 Loss: 0.60253 Threads: 8 Forward time: 3.59s Backward time: 4.30s Step time: 35.91s\n", + "Starting Validation.\n", + "Epochs: 262 Examples seen:25152000 Validation Accuracy: 0.8105 Validation Error: 0.4874 Validation Loss: 0.6361 Total time: 535.09min\n", + "Epoch time: 1.7957 minutes. 500 epochs: 14.9642 hours.\n", + "Epochs: 262. Working time: 8.92 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "25184000 Examples seen. Accuracy: 0.8397 Error: 0.41640 Loss: 0.49582 Threads: 8 Forward time: 3.23s Backward time: 4.03s Step time: 37.28s\n", + "25216000 Examples seen. 
Accuracy: 0.8355 Error: 0.41630 Loss: 0.52141 Threads: 8 Forward time: 3.33s Backward time: 4.26s Step time: 38.93s\n", + "25248000 Examples seen. Accuracy: 0.8324 Error: 0.49612 Loss: 0.58265 Threads: 8 Forward time: 3.33s Backward time: 4.16s Step time: 37.45s\n", + "Starting Validation.\n", + "Epochs: 263 Examples seen:25248000 Validation Accuracy: 0.8132 Validation Error: 0.4862 Validation Loss: 0.6198 Total time: 537.03min\n", + "Epoch time: 1.8724 minutes. 500 epochs: 15.6029 hours.\n", + "Epochs: 263. Working time: 8.95 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they were so exc.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "25280000 Examples seen. Accuracy: 0.8444 Error: 0.41751 Loss: 0.49855 Threads: 8 Forward time: 3.22s Backward time: 4.05s Step time: 37.13s\n", + "25312000 Examples seen. Accuracy: 0.8454 Error: 0.46958 Loss: 0.58126 Threads: 8 Forward time: 3.20s Backward time: 3.90s Step time: 36.01s\n", + "25344000 Examples seen. 
Accuracy: 0.8387 Error: 0.51666 Loss: 0.64451 Threads: 8 Forward time: 3.08s Backward time: 3.74s Step time: 35.64s\n", + "Starting Validation.\n", + "Epochs: 264 Examples seen:25344000 Validation Accuracy: 0.8074 Validation Error: 0.5059 Validation Loss: 0.6311 Total time: 538.89min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.908 Min Weight: -0.941 Max Output: 0.720 Min Output: -0.736 TNNetPointwiseConv 81,1,32 Times: 0.22s 0.10s Parent:0\n", + "Layer 2 Max Output: 0.720 Min Output: -0.736 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.636 Min Weight: -0.634 Max Output: 3.641 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.15s 0.06s Parent:2\n", + "Layer 4 Max Output: 3.641 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.641 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.341 Min Weight: -0.359 Max Output: 4.118 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.40s 0.25s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.220 Min Weight: -0.232 Max Output: 2.822 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.57s 0.38s Parent:6\n", + "Layer 8 Max Output: 2.822 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.05s 0.00s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.126 Min Weight: -0.167 Max Output: 1.639 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.77s 0.37s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.245 Min Weight: -0.200 Max Output: 3.529 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.973 Min Weight: -0.549 Max Output: 14.993 Min Output: -4.022 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.963 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.7820 minutes. 500 epochs: 14.8496 hours.\n", + "Epochs: 264. Working time: 8.98 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big sl.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "25376000 Examples seen. Accuracy: 0.8364 Error: 0.38630 Loss: 0.43177 Threads: 8 Forward time: 3.32s Backward time: 4.16s Step time: 36.20s\n", + "25408000 Examples seen. Accuracy: 0.8361 Error: 0.43549 Loss: 0.50283 Threads: 8 Forward time: 3.47s Backward time: 4.49s Step time: 36.24s\n", + "25440000 Examples seen. Accuracy: 0.8343 Error: 0.46142 Loss: 0.57382 Threads: 8 Forward time: 3.29s Backward time: 4.12s Step time: 35.96s\n", + "Starting Validation.\n", + "Epochs: 265 Examples seen:25440000 Validation Accuracy: 0.8101 Validation Error: 0.4928 Validation Loss: 0.6327 Total time: 540.74min\n", + "Epoch time: 1.7978 minutes. 500 epochs: 14.9817 hours.\n", + "Epochs: 265. Working time: 9.01 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "25472000 Examples seen. Accuracy: 0.8378 Error: 0.42516 Loss: 0.46284 Threads: 8 Forward time: 3.15s Backward time: 3.79s Step time: 35.74s\n", + "25504000 Examples seen. Accuracy: 0.8374 Error: 0.44891 Loss: 0.54098 Threads: 8 Forward time: 3.33s Backward time: 4.10s Step time: 35.58s\n", + "25536000 Examples seen. 
Accuracy: 0.8353 Error: 0.49026 Loss: 0.64648 Threads: 8 Forward time: 3.32s Backward time: 4.10s Step time: 36.10s\n", + "Starting Validation.\n", + "Epochs: 266 Examples seen:25536000 Validation Accuracy: 0.8143 Validation Error: 0.4852 Validation Loss: 0.6393 Total time: 542.57min\n", + "Epoch time: 1.8051 minutes. 500 epochs: 15.0429 hours.\n", + "Epochs: 266. Working time: 9.04 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they wanted to say.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "25568000 Examples seen. Accuracy: 0.8350 Error: 0.45510 Loss: 0.53908 Threads: 8 Forward time: 3.12s Backward time: 3.84s Step time: 36.31s\n", + "25600000 Examples seen. Accuracy: 0.8364 Error: 0.42802 Loss: 0.52137 Threads: 8 Forward time: 3.36s Backward time: 4.16s Step time: 35.61s\n", + "25632000 Examples seen. Accuracy: 0.8350 Error: 0.50805 Loss: 0.67803 Threads: 8 Forward time: 3.09s Backward time: 3.82s Step time: 36.16s\n", + "Starting Validation.\n", + "Epochs: 267 Examples seen:25632000 Validation Accuracy: 0.8122 Validation Error: 0.4922 Validation Loss: 0.6329 Total time: 544.42min\n", + "Epoch time: 1.8080 minutes. 500 epochs: 15.0667 hours.\n", + "Epochs: 267. Working time: 9.07 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily was playing with her toys. she had a big cat na.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "25664000 Examples seen. Accuracy: 0.8370 Error: 0.40263 Loss: 0.46442 Threads: 8 Forward time: 3.09s Backward time: 3.57s Step time: 35.69s\n", + "25696000 Examples seen. 
Accuracy: 0.8357 Error: 0.40583 Loss: 0.48273 Threads: 8 Forward time: 3.26s Backward time: 3.97s Step time: 35.95s\n", + "25728000 Examples seen. Accuracy: 0.8370 Error: 0.44241 Loss: 0.52253 Threads: 8 Forward time: 3.28s Backward time: 3.92s Step time: 36.28s\n", + "Starting Validation.\n", + "Epochs: 268 Examples seen:25728000 Validation Accuracy: 0.8105 Validation Error: 0.4926 Validation Loss: 0.6271 Total time: 546.26min\n", + "Epoch time: 1.8141 minutes. 500 epochs: 15.1171 hours.\n", + "Epochs: 268. Working time: 9.10 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named amy was playing with her toys. she was a big strong .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "25760000 Examples seen. Accuracy: 0.8372 Error: 0.48873 Loss: 0.62408 Threads: 8 Forward time: 3.26s Backward time: 4.04s Step time: 36.44s\n", + "25792000 Examples seen. Accuracy: 0.8373 Error: 0.40873 Loss: 0.48552 Threads: 8 Forward time: 3.44s Backward time: 4.31s Step time: 36.08s\n", + "25824000 Examples seen. Accuracy: 0.8361 Error: 0.43165 Loss: 0.50109 Threads: 8 Forward time: 3.76s Backward time: 4.66s Step time: 35.64s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 269 Examples seen:25824000 Validation Accuracy: 0.8159 Validation Error: 0.4897 Validation Loss: 0.6225 Total time: 548.15min\n", + "Epoch time: 1.7820 minutes. 500 epochs: 14.8496 hours.\n", + "Epochs: 269. Working time: 9.14 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily was playing in her garden. she was so excited b.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "25856000 Examples seen. 
Accuracy: 0.8354 Error: 0.48677 Loss: 0.57618 Threads: 8 Forward time: 3.23s Backward time: 3.86s Step time: 35.89s\n", + "25888000 Examples seen. Accuracy: 0.8372 Error: 0.40956 Loss: 0.53123 Threads: 8 Forward time: 3.43s Backward time: 4.18s Step time: 36.51s\n", + "25920000 Examples seen. Accuracy: 0.8366 Error: 0.43120 Loss: 0.51107 Threads: 8 Forward time: 3.20s Backward time: 3.90s Step time: 36.65s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 270 Examples seen:25920000 Validation Accuracy: 0.8168 Validation Error: 0.4960 Validation Loss: 0.6189 Total time: 550.05min\n", + "Starting Testing.\n", + "Epochs: 270 Examples seen:25920000 Test Accuracy: 0.8168 Test Error: 0.4960 Test Loss: 0.6189 Total time: 550.09min\n", + "Epoch time: 1.8323 minutes. 500 epochs: 15.2696 hours.\n", + "Epochs: 270. Working time: 9.17 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "25952000 Examples seen. Accuracy: 0.8367 Error: 0.37972 Loss: 0.47382 Threads: 8 Forward time: 3.59s Backward time: 4.57s Step time: 36.08s\n", + "25984000 Examples seen. Accuracy: 0.8344 Error: 0.43215 Loss: 0.47606 Threads: 8 Forward time: 3.12s Backward time: 3.74s Step time: 35.37s\n", + "26016000 Examples seen. Accuracy: 0.8454 Error: 0.44685 Loss: 0.51627 Threads: 8 Forward time: 3.35s Backward time: 3.97s Step time: 36.76s\n", + "Starting Validation.\n", + "Epochs: 271 Examples seen:26016000 Validation Accuracy: 0.8130 Validation Error: 0.4790 Validation Loss: 0.6308 Total time: 551.93min\n", + "Epoch time: 1.8382 minutes. 500 epochs: 15.3183 hours.\n", + "Epochs: 271. Working time: 9.20 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little girl named lucy went to the park with her mom. the sun was shin.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "26048000 Examples seen. Accuracy: 0.8424 Error: 0.46811 Loss: 0.58061 Threads: 8 Forward time: 3.16s Backward time: 3.77s Step time: 37.29s\n", + "26080000 Examples seen. Accuracy: 0.8377 Error: 0.49149 Loss: 0.59149 Threads: 8 Forward time: 3.28s Backward time: 3.88s Step time: 35.72s\n", + "26112000 Examples seen. Accuracy: 0.8349 Error: 0.55153 Loss: 0.73372 Threads: 8 Forward time: 3.23s Backward time: 3.89s Step time: 35.65s\n", + "Starting Validation.\n", + "Epochs: 272 Examples seen:26112000 Validation Accuracy: 0.8153 Validation Error: 0.4833 Validation Loss: 0.6187 Total time: 553.79min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.911 Min Weight: -0.942 Max Output: 0.722 Min Output: -0.736 TNNetPointwiseConv 81,1,32 Times: 0.18s 0.11s Parent:0\n", + "Layer 2 Max Output: 0.722 Min Output: -0.736 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.641 Min Weight: -0.636 Max Output: 3.633 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.15s 0.05s Parent:2\n", + "Layer 4 Max Output: 3.633 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.633 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.347 Min Weight: -0.359 Max Output: 4.276 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.43s 0.26s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.220 Min Weight: -0.231 Max Output: 2.795 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.56s 0.42s Parent:6\n", + "Layer 8 Max Output: 2.795 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.06s 0.01s Parent:7\n", + "Layer 9 
Neurons:1024 Max Weight: 0.127 Min Weight: -0.168 Max Output: 1.536 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.76s 0.37s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.247 Min Weight: -0.199 Max Output: 3.374 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.06s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.978 Min Weight: -0.553 Max Output: 14.042 Min Output: -4.043 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.877 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.7825 minutes. 500 epochs: 14.8542 hours.\n", + "Epochs: 272. Working time: 9.23 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy was walking in the park. she saw a big snowl wh.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "26144000 Examples seen. Accuracy: 0.8344 Error: 0.53698 Loss: 0.68673 Threads: 8 Forward time: 3.25s Backward time: 4.18s Step time: 36.40s\n", + "26176000 Examples seen. Accuracy: 0.8380 Error: 0.37871 Loss: 0.44730 Threads: 8 Forward time: 3.13s Backward time: 3.78s Step time: 36.06s\n", + "26208000 Examples seen. Accuracy: 0.8364 Error: 0.41576 Loss: 0.54193 Threads: 8 Forward time: 3.24s Backward time: 3.80s Step time: 36.06s\n", + "Starting Validation.\n", + "Epochs: 273 Examples seen:26208000 Validation Accuracy: 0.8097 Validation Error: 0.4898 Validation Loss: 0.6337 Total time: 555.64min\n", + "Epoch time: 1.8029 minutes. 500 epochs: 15.0246 hours.\n", + "Epochs: 273. Working time: 9.26 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big bo.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "26240000 Examples seen. Accuracy: 0.8375 Error: 0.37630 Loss: 0.43093 Threads: 8 Forward time: 3.37s Backward time: 4.21s Step time: 36.77s\n", + "26272000 Examples seen. Accuracy: 0.8386 Error: 0.49009 Loss: 0.52257 Threads: 8 Forward time: 3.06s Backward time: 3.64s Step time: 35.99s\n", + "26304000 Examples seen. Accuracy: 0.8382 Error: 0.41170 Loss: 0.49441 Threads: 8 Forward time: 3.10s Backward time: 3.80s Step time: 35.73s\n", + "Starting Validation.\n", + "Epochs: 274 Examples seen:26304000 Validation Accuracy: 0.8103 Validation Error: 0.4884 Validation Loss: 0.6169 Total time: 557.50min\n", + "Epoch time: 1.7866 minutes. 500 epochs: 14.8879 hours.\n", + "Epochs: 274. Working time: 9.29 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named lily went to the park with her mommy. she was so exc.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "26336000 Examples seen. Accuracy: 0.8462 Error: 0.37204 Loss: 0.43671 Threads: 8 Forward time: 3.49s Backward time: 3.63s Step time: 35.00s\n", + "26368000 Examples seen. Accuracy: 0.8511 Error: 0.42188 Loss: 0.50891 Threads: 8 Forward time: 3.37s Backward time: 4.01s Step time: 35.40s\n", + "26400000 Examples seen. Accuracy: 0.8452 Error: 0.36818 Loss: 0.40032 Threads: 8 Forward time: 3.25s Backward time: 4.00s Step time: 36.21s\n", + "Starting Validation.\n", + "Epochs: 275 Examples seen:26400000 Validation Accuracy: 0.8074 Validation Error: 0.4880 Validation Loss: 0.6306 Total time: 559.32min\n", + "Epoch time: 1.8103 minutes. 500 epochs: 15.0854 hours.\n", + "Epochs: 275. Working time: 9.32 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mommy. 
they saw a big.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "26432000 Examples seen. Accuracy: 0.8407 Error: 0.38948 Loss: 0.49977 Threads: 8 Forward time: 3.34s Backward time: 4.12s Step time: 35.79s\n", + "26464000 Examples seen. Accuracy: 0.8368 Error: 0.47208 Loss: 0.59457 Threads: 8 Forward time: 3.25s Backward time: 3.94s Step time: 36.99s\n", + "26496000 Examples seen. Accuracy: 0.8473 Error: 0.40386 Loss: 0.44693 Threads: 8 Forward time: 3.15s Backward time: 3.76s Step time: 35.03s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 276 Examples seen:26496000 Validation Accuracy: 0.8174 Validation Error: 0.4683 Validation Loss: 0.6199 Total time: 561.20min\n", + "Epoch time: 1.7515 minutes. 500 epochs: 14.5958 hours.\n", + "Epochs: 276. Working time: 9.35 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "26528000 Examples seen. Accuracy: 0.8469 Error: 0.40942 Loss: 0.53222 Threads: 8 Forward time: 3.38s Backward time: 4.17s Step time: 35.34s\n", + "26560000 Examples seen. Accuracy: 0.8453 Error: 0.37579 Loss: 0.42572 Threads: 8 Forward time: 3.14s Backward time: 3.88s Step time: 35.96s\n", + "26592000 Examples seen. Accuracy: 0.8428 Error: 0.34349 Loss: 0.43602 Threads: 8 Forward time: 3.21s Backward time: 3.94s Step time: 34.95s\n", + "Starting Validation.\n", + "Epochs: 277 Examples seen:26592000 Validation Accuracy: 0.8078 Validation Error: 0.4878 Validation Loss: 0.6255 Total time: 563.01min\n", + "Epoch time: 1.7473 minutes. 500 epochs: 14.5608 hours.\n", + "Epochs: 277. 
Working time: 9.38 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily was playing with her toys in her room. she had .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "26624000 Examples seen. Accuracy: 0.8420 Error: 0.37178 Loss: 0.42576 Threads: 8 Forward time: 3.19s Backward time: 4.00s Step time: 35.05s\n", + "26656000 Examples seen. Accuracy: 0.8406 Error: 0.41197 Loss: 0.47201 Threads: 8 Forward time: 3.65s Backward time: 4.43s Step time: 35.63s\n", + "26688000 Examples seen. Accuracy: 0.8380 Error: 0.36564 Loss: 0.42718 Threads: 8 Forward time: 3.59s Backward time: 4.55s Step time: 36.26s\n", + "Starting Validation.\n", + "Epochs: 278 Examples seen:26688000 Validation Accuracy: 0.8128 Validation Error: 0.4873 Validation Loss: 0.6334 Total time: 564.84min\n", + "Epoch time: 1.8128 minutes. 500 epochs: 15.1063 hours.\n", + "Epochs: 278. Working time: 9.41 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mommy said the street.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "26720000 Examples seen. Accuracy: 0.8355 Error: 0.43978 Loss: 0.48404 Threads: 8 Forward time: 3.26s Backward time: 3.92s Step time: 35.35s\n", + "26752000 Examples seen. Accuracy: 0.8459 Error: 0.43095 Loss: 0.59145 Threads: 8 Forward time: 3.44s Backward time: 4.58s Step time: 35.61s\n", + "26784000 Examples seen. 
Accuracy: 0.8406 Error: 0.49139 Loss: 0.65712 Threads: 8 Forward time: 3.10s Backward time: 3.69s Step time: 35.82s\n", + "Starting Validation.\n", + "Epochs: 279 Examples seen:26784000 Validation Accuracy: 0.8159 Validation Error: 0.4863 Validation Loss: 0.6283 Total time: 566.66min\n", + "Epoch time: 1.7908 minutes. 500 epochs: 14.9233 hours.\n", + "Epochs: 279. Working time: 9.44 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily was playing in her backyard. she saw a big sunn.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "26816000 Examples seen. Accuracy: 0.8389 Error: 0.36631 Loss: 0.43677 Threads: 8 Forward time: 3.47s Backward time: 4.40s Step time: 35.23s\n", + "26848000 Examples seen. Accuracy: 0.8371 Error: 0.42891 Loss: 0.55700 Threads: 8 Forward time: 3.25s Backward time: 3.94s Step time: 35.36s\n", + "26880000 Examples seen. 
Accuracy: 0.8438 Error: 0.41124 Loss: 0.43421 Threads: 8 Forward time: 3.40s Backward time: 4.28s Step time: 36.21s\n", + "Starting Validation.\n", + "Epochs: 280 Examples seen:26880000 Validation Accuracy: 0.8155 Validation Error: 0.4641 Validation Loss: 0.6285 Total time: 568.49min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.916 Min Weight: -0.948 Max Output: 0.724 Min Output: -0.739 TNNetPointwiseConv 81,1,32 Times: 0.18s 0.12s Parent:0\n", + "Layer 2 Max Output: 0.724 Min Output: -0.739 TNNetPadXY 83,1,32 Times: 0.01s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.647 Min Weight: -0.645 Max Output: 3.687 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.16s 0.07s Parent:2\n", + "Layer 4 Max Output: 3.687 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.687 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.346 Min Weight: -0.362 Max Output: 3.931 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.45s 0.28s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.223 Min Weight: -0.229 Max Output: 3.048 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.62s 0.46s Parent:6\n", + "Layer 8 Max Output: 3.048 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.09s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.127 Min Weight: -0.169 Max Output: 1.676 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.75s 0.41s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.249 Min Weight: -0.199 Max Output: 4.571 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.03s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.981 Min Weight: -0.559 Max Output: 17.922 Min Output: -4.673 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.986 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Starting Testing.\n", + "Epochs: 280 Examples seen:26880000 Test Accuracy: 0.8155 Test Error: 0.4641 Test Loss: 0.6285 Total time: 568.53min\n", + "Epoch time: 1.8105 minutes. 500 epochs: 15.0875 hours.\n", + "Epochs: 280. Working time: 9.48 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went for a walk in the park. she saw a big box .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "26912000 Examples seen. Accuracy: 0.8465 Error: 0.42197 Loss: 0.54139 Threads: 8 Forward time: 3.81s Backward time: 4.75s Step time: 36.94s\n", + "26944000 Examples seen. Accuracy: 0.8387 Error: 0.44488 Loss: 0.52860 Threads: 8 Forward time: 3.34s Backward time: 4.02s Step time: 36.78s\n", + "26976000 Examples seen. Accuracy: 0.8383 Error: 0.50149 Loss: 0.56653 Threads: 8 Forward time: 3.32s Backward time: 4.04s Step time: 36.26s\n", + "Starting Validation.\n", + "Epochs: 281 Examples seen:26976000 Validation Accuracy: 0.8109 Validation Error: 0.4879 Validation Loss: 0.6229 Total time: 570.40min\n", + "Epoch time: 1.8130 minutes. 500 epochs: 15.1079 hours.\n", + "Epochs: 281. Working time: 9.51 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mommy. they saw a big.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "27008000 Examples seen. Accuracy: 0.8482 Error: 0.40870 Loss: 0.42991 Threads: 8 Forward time: 3.11s Backward time: 3.73s Step time: 35.95s\n", + "27040000 Examples seen. Accuracy: 0.8588 Error: 0.36376 Loss: 0.40075 Threads: 8 Forward time: 3.15s Backward time: 4.02s Step time: 36.57s\n", + "27072000 Examples seen. 
Accuracy: 0.8533 Error: 0.38855 Loss: 0.44889 Threads: 8 Forward time: 3.40s Backward time: 4.09s Step time: 36.63s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 282 Examples seen:27072000 Validation Accuracy: 0.8193 Validation Error: 0.4754 Validation Loss: 0.6322 Total time: 572.31min\n", + "Epoch time: 1.8313 minutes. 500 epochs: 15.2608 hours.\n", + "Epochs: 282. Working time: 9.54 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily was playing in her backyard. she saw a big box .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "27104000 Examples seen. Accuracy: 0.8446 Error: 0.41955 Loss: 0.55409 Threads: 8 Forward time: 3.13s Backward time: 3.76s Step time: 36.65s\n", + "27136000 Examples seen. Accuracy: 0.8373 Error: 0.45404 Loss: 0.58405 Threads: 8 Forward time: 3.16s Backward time: 3.87s Step time: 35.95s\n", + "27168000 Examples seen. Accuracy: 0.8368 Error: 0.45444 Loss: 0.48981 Threads: 8 Forward time: 3.48s Backward time: 4.17s Step time: 36.71s\n", + "Starting Validation.\n", + "Epochs: 283 Examples seen:27168000 Validation Accuracy: 0.8130 Validation Error: 0.4950 Validation Loss: 0.6188 Total time: 574.17min\n", + "Epoch time: 1.8354 minutes. 500 epochs: 15.2950 hours.\n", + "Epochs: 283. Working time: 9.57 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big sl.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "27200000 Examples seen. Accuracy: 0.8369 Error: 0.41176 Loss: 0.53279 Threads: 8 Forward time: 3.68s Backward time: 4.35s Step time: 35.91s\n", + "27232000 Examples seen. 
Accuracy: 0.8505 Error: 0.39827 Loss: 0.43549 Threads: 8 Forward time: 3.53s Backward time: 4.22s Step time: 36.01s\n", + "27264000 Examples seen. Accuracy: 0.8477 Error: 0.41616 Loss: 0.57335 Threads: 8 Forward time: 3.49s Backward time: 4.26s Step time: 35.42s\n", + "Starting Validation.\n", + "Epochs: 284 Examples seen:27264000 Validation Accuracy: 0.8120 Validation Error: 0.4843 Validation Loss: 0.6302 Total time: 576.01min\n", + "Epoch time: 1.7709 minutes. 500 epochs: 14.7571 hours.\n", + "Epochs: 284. Working time: 9.60 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim wanted to go for a walk. he saw a big box of the .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "27296000 Examples seen. Accuracy: 0.8405 Error: 0.49389 Loss: 0.62241 Threads: 8 Forward time: 3.32s Backward time: 4.06s Step time: 35.34s\n", + "27328000 Examples seen. Accuracy: 0.8371 Error: 0.42700 Loss: 0.50916 Threads: 8 Forward time: 3.65s Backward time: 4.42s Step time: 36.03s\n", + "27360000 Examples seen. Accuracy: 0.8393 Error: 0.36074 Loss: 0.41627 Threads: 8 Forward time: 3.10s Backward time: 3.71s Step time: 35.30s\n", + "Starting Validation.\n", + "Epochs: 285 Examples seen:27360000 Validation Accuracy: 0.8157 Validation Error: 0.4761 Validation Loss: 0.6091 Total time: 577.83min\n", + "Epoch time: 1.7650 minutes. 500 epochs: 14.7087 hours.\n", + "Epochs: 285. Working time: 9.63 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily was playing in her backyard. she was three year.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "27392000 Examples seen. 
Accuracy: 0.8399 Error: 0.43448 Loss: 0.54386 Threads: 8 Forward time: 3.15s Backward time: 3.71s Step time: 35.07s\n", + "27424000 Examples seen. Accuracy: 0.8512 Error: 0.43206 Loss: 0.52351 Threads: 8 Forward time: 3.43s Backward time: 4.13s Step time: 35.94s\n", + "27456000 Examples seen. Accuracy: 0.8466 Error: 0.41073 Loss: 0.52676 Threads: 8 Forward time: 3.19s Backward time: 3.93s Step time: 36.60s\n", + "Starting Validation.\n", + "Epochs: 286 Examples seen:27456000 Validation Accuracy: 0.8105 Validation Error: 0.4875 Validation Loss: 0.6268 Total time: 579.67min\n", + "Epoch time: 1.8301 minutes. 500 epochs: 15.2508 hours.\n", + "Epochs: 286. Working time: 9.66 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they saw a big box.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "27488000 Examples seen. Accuracy: 0.8424 Error: 0.49517 Loss: 0.61717 Threads: 8 Forward time: 3.56s Backward time: 4.50s Step time: 37.04s\n", + "27520000 Examples seen. Accuracy: 0.8394 Error: 0.47721 Loss: 0.54376 Threads: 8 Forward time: 3.23s Backward time: 3.97s Step time: 36.45s\n", + "27552000 Examples seen. Accuracy: 0.8365 Error: 0.37264 Loss: 0.42039 Threads: 8 Forward time: 3.16s Backward time: 3.73s Step time: 36.24s\n", + "Starting Validation.\n", + "Epochs: 287 Examples seen:27552000 Validation Accuracy: 0.8159 Validation Error: 0.4776 Validation Loss: 0.6268 Total time: 581.54min\n", + "Epoch time: 1.8122 minutes. 500 epochs: 15.1013 hours.\n", + "Epochs: 287. Working time: 9.69 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named sarah was walking through the park with her mom and .\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "27584000 Examples seen. Accuracy: 0.8368 Error: 0.38530 Loss: 0.44078 Threads: 8 Forward time: 3.26s Backward time: 3.99s Step time: 36.43s\n", + "27616000 Examples seen. Accuracy: 0.8392 Error: 0.45438 Loss: 0.57166 Threads: 8 Forward time: 3.11s Backward time: 3.80s Step time: 35.36s\n", + "27648000 Examples seen. Accuracy: 0.8385 Error: 0.38834 Loss: 0.43809 Threads: 8 Forward time: 3.29s Backward time: 3.90s Step time: 35.02s\n", + "Starting Validation.\n", + "Epochs: 288 Examples seen:27648000 Validation Accuracy: 0.8111 Validation Error: 0.4877 Validation Loss: 0.6219 Total time: 583.37min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.915 Min Weight: -0.950 Max Output: 0.723 Min Output: -0.740 TNNetPointwiseConv 81,1,32 Times: 0.19s 0.08s Parent:0\n", + "Layer 2 Max Output: 0.723 Min Output: -0.740 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.647 Min Weight: -0.654 Max Output: 3.754 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.13s 0.07s Parent:2\n", + "Layer 4 Max Output: 3.754 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.754 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.01s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.353 Min Weight: -0.363 Max Output: 4.371 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.41s 0.26s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.228 Min Weight: -0.232 Max Output: 2.891 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.55s 0.41s Parent:6\n", + "Layer 8 Max Output: 2.891 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.00s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.126 Min Weight: -0.171 Max Output: 1.755 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.71s 0.39s Parent:8\n", + "Layer 10 Neurons:128 Max 
Weight: 0.251 Min Weight: -0.199 Max Output: 3.742 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.04s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.986 Min Weight: -0.562 Max Output: 14.976 Min Output: -4.015 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.959 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.7508 minutes. 500 epochs: 14.5896 hours.\n", + "Epochs: 288. Working time: 9.72 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "27680000 Examples seen. Accuracy: 0.8398 Error: 0.39571 Loss: 0.46305 Threads: 8 Forward time: 3.29s Backward time: 4.00s Step time: 36.34s\n", + "27712000 Examples seen. Accuracy: 0.8383 Error: 0.43876 Loss: 0.54900 Threads: 8 Forward time: 3.58s Backward time: 4.41s Step time: 35.81s\n", + "27744000 Examples seen. Accuracy: 0.8406 Error: 0.41156 Loss: 0.57253 Threads: 8 Forward time: 3.24s Backward time: 3.96s Step time: 35.90s\n", + "Starting Validation.\n", + "Epochs: 289 Examples seen:27744000 Validation Accuracy: 0.8162 Validation Error: 0.4885 Validation Loss: 0.6143 Total time: 585.21min\n", + "Epoch time: 1.7949 minutes. 500 epochs: 14.9579 hours.\n", + "Epochs: 289. Working time: 9.75 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lucy went to the park. she wanted to go on a sunny d.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "27776000 Examples seen. 
Accuracy: 0.8456 Error: 0.39224 Loss: 0.50444 Threads: 8 Forward time: 3.12s Backward time: 3.67s Step time: 36.01s\n", + "27808000 Examples seen. Accuracy: 0.8534 Error: 0.37717 Loss: 0.42411 Threads: 8 Forward time: 3.15s Backward time: 3.94s Step time: 35.70s\n", + "27840000 Examples seen. Accuracy: 0.8489 Error: 0.43796 Loss: 0.55895 Threads: 8 Forward time: 3.29s Backward time: 4.00s Step time: 35.67s\n", + "Starting Validation.\n", + "Epochs: 290 Examples seen:27840000 Validation Accuracy: 0.8153 Validation Error: 0.4786 Validation Loss: 0.6257 Total time: 587.05min\n", + "Starting Testing.\n", + "Epochs: 290 Examples seen:27840000 Test Accuracy: 0.8153 Test Error: 0.4786 Test Loss: 0.6257 Total time: 587.09min\n", + "Epoch time: 1.7834 minutes. 500 epochs: 14.8617 hours.\n", + "Epochs: 290. Working time: 9.78 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy went to the park with her mommy. they saw a big.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "27872000 Examples seen. Accuracy: 0.8443 Error: 0.40835 Loss: 0.44103 Threads: 8 Forward time: 3.17s Backward time: 3.75s Step time: 35.81s\n", + "27904000 Examples seen. Accuracy: 0.8401 Error: 0.50679 Loss: 0.60977 Threads: 8 Forward time: 3.11s Backward time: 3.76s Step time: 34.81s\n", + "27936000 Examples seen. Accuracy: 0.8380 Error: 0.42387 Loss: 0.50503 Threads: 8 Forward time: 3.31s Backward time: 4.23s Step time: 35.74s\n", + "Starting Validation.\n", + "Epochs: 291 Examples seen:27936000 Validation Accuracy: 0.8149 Validation Error: 0.4906 Validation Loss: 0.6152 Total time: 588.90min\n", + "Epoch time: 1.7869 minutes. 500 epochs: 14.8908 hours.\n", + "Epochs: 291. Working time: 9.82 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big b.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "27968000 Examples seen. Accuracy: 0.8405 Error: 0.41726 Loss: 0.48534 Threads: 8 Forward time: 3.27s Backward time: 3.94s Step time: 35.77s\n", + "28000000 Examples seen. Accuracy: 0.8496 Error: 0.43676 Loss: 0.50975 Threads: 8 Forward time: 3.22s Backward time: 3.86s Step time: 36.83s\n", + "28032000 Examples seen. Accuracy: 0.8481 Error: 0.42838 Loss: 0.48473 Threads: 8 Forward time: 3.39s Backward time: 4.21s Step time: 36.55s\n", + "Starting Validation.\n", + "Epochs: 292 Examples seen:28032000 Validation Accuracy: 0.8159 Validation Error: 0.4782 Validation Loss: 0.6165 Total time: 590.77min\n", + "Epoch time: 1.8274 minutes. 500 epochs: 15.2283 hours.\n", + "Epochs: 292. Working time: 9.85 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy was playing in her garden. she was so excited b.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "28064000 Examples seen. Accuracy: 0.8459 Error: 0.39421 Loss: 0.46690 Threads: 8 Forward time: 3.05s Backward time: 3.76s Step time: 35.60s\n", + "28096000 Examples seen. Accuracy: 0.8424 Error: 0.48530 Loss: 0.57749 Threads: 8 Forward time: 3.07s Backward time: 3.60s Step time: 34.78s\n", + "28128000 Examples seen. Accuracy: 0.8543 Error: 0.45518 Loss: 0.55390 Threads: 8 Forward time: 3.28s Backward time: 4.07s Step time: 35.47s\n", + "Starting Validation.\n", + "Epochs: 293 Examples seen:28128000 Validation Accuracy: 0.8109 Validation Error: 0.4710 Validation Loss: 0.6311 Total time: 592.57min\n", + "Epoch time: 1.7735 minutes. 500 epochs: 14.7792 hours.\n", + "Epochs: 293. 
Working time: 9.88 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big b.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "28160000 Examples seen. Accuracy: 0.8530 Error: 0.40814 Loss: 0.46499 Threads: 8 Forward time: 3.45s Backward time: 4.28s Step time: 35.85s\n", + "28192000 Examples seen. Accuracy: 0.8468 Error: 0.39354 Loss: 0.45745 Threads: 8 Forward time: 3.11s Backward time: 3.82s Step time: 35.00s\n", + "28224000 Examples seen. Accuracy: 0.8422 Error: 0.44341 Loss: 0.55665 Threads: 8 Forward time: 3.45s Backward time: 4.30s Step time: 36.05s\n", + "Starting Validation.\n", + "Epochs: 294 Examples seen:28224000 Validation Accuracy: 0.8149 Validation Error: 0.4822 Validation Loss: 0.6185 Total time: 594.40min\n", + "Epoch time: 1.8025 minutes. 500 epochs: 15.0208 hours.\n", + "Epochs: 294. Working time: 9.91 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big sli.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "28256000 Examples seen. Accuracy: 0.8420 Error: 0.40957 Loss: 0.50332 Threads: 8 Forward time: 3.09s Backward time: 3.69s Step time: 35.02s\n", + "28288000 Examples seen. Accuracy: 0.8413 Error: 0.49059 Loss: 0.61617 Threads: 8 Forward time: 3.36s Backward time: 4.24s Step time: 35.48s\n", + "28320000 Examples seen. 
Accuracy: 0.8402 Error: 0.40170 Loss: 0.48510 Threads: 8 Forward time: 3.35s Backward time: 4.09s Step time: 35.18s\n", + "Starting Validation.\n", + "Epochs: 295 Examples seen:28320000 Validation Accuracy: 0.8147 Validation Error: 0.4832 Validation Loss: 0.6225 Total time: 596.20min\n", + "Epoch time: 1.7589 minutes. 500 epochs: 14.6571 hours.\n", + "Epochs: 295. Working time: 9.94 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "28352000 Examples seen. Accuracy: 0.8483 Error: 0.42589 Loss: 0.48947 Threads: 8 Forward time: 3.27s Backward time: 4.06s Step time: 35.22s\n", + "28384000 Examples seen. Accuracy: 0.8434 Error: 0.38906 Loss: 0.43929 Threads: 8 Forward time: 3.15s Backward time: 3.83s Step time: 35.50s\n", + "28416000 Examples seen. 
Accuracy: 0.8403 Error: 0.50714 Loss: 0.75169 Threads: 8 Forward time: 3.37s Backward time: 4.12s Step time: 34.53s\n", + "Starting Validation.\n", + "Epochs: 296 Examples seen:28416000 Validation Accuracy: 0.8162 Validation Error: 0.4851 Validation Loss: 0.6270 Total time: 598.00min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.914 Min Weight: -0.955 Max Output: 0.723 Min Output: -0.742 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.723 Min Output: -0.742 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.649 Min Weight: -0.652 Max Output: 3.759 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.14s 0.06s Parent:2\n", + "Layer 4 Max Output: 3.759 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.759 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.352 Min Weight: -0.366 Max Output: 4.201 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.38s 0.29s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.229 Min Weight: -0.231 Max Output: 2.821 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.53s 0.45s Parent:6\n", + "Layer 8 Max Output: 2.821 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.06s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.126 Min Weight: -0.172 Max Output: 1.633 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.71s 0.39s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.253 Min Weight: -0.202 Max Output: 3.849 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.990 Min Weight: -0.566 Max Output: 16.225 Min Output: -4.451 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.02s Parent:10\n", + "Layer 12 Max Output: 0.973 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.7265 minutes. 500 epochs: 14.3871 hours.\n", + "Epochs: 296. Working time: 9.97 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big tr.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "28448000 Examples seen. Accuracy: 0.8541 Error: 0.36609 Loss: 0.40109 Threads: 8 Forward time: 3.26s Backward time: 4.24s Step time: 36.16s\n", + "28480000 Examples seen. Accuracy: 0.8518 Error: 0.36648 Loss: 0.39887 Threads: 8 Forward time: 3.39s Backward time: 4.31s Step time: 35.67s\n", + "28512000 Examples seen. Accuracy: 0.8475 Error: 0.45174 Loss: 0.51785 Threads: 8 Forward time: 3.50s Backward time: 4.37s Step time: 35.68s\n", + "Starting Validation.\n", + "Epochs: 297 Examples seen:28512000 Validation Accuracy: 0.8139 Validation Error: 0.4787 Validation Loss: 0.6257 Total time: 599.84min\n", + "Epoch time: 1.7841 minutes. 500 epochs: 14.8675 hours.\n", + "Epochs: 297. Working time: 10.00 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. the sand and saw.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "28544000 Examples seen. Accuracy: 0.8612 Error: 0.37136 Loss: 0.41765 Threads: 8 Forward time: 3.25s Backward time: 4.07s Step time: 35.62s\n", + "28576000 Examples seen. Accuracy: 0.8592 Error: 0.40193 Loss: 0.49640 Threads: 8 Forward time: 3.72s Backward time: 4.41s Step time: 35.86s\n", + "28608000 Examples seen. 
Accuracy: 0.8476 Error: 0.42730 Loss: 0.48343 Threads: 8 Forward time: 3.35s Backward time: 4.16s Step time: 35.60s\n", + "Starting Validation.\n", + "Epochs: 298 Examples seen:28608000 Validation Accuracy: 0.8126 Validation Error: 0.4815 Validation Loss: 0.6249 Total time: 601.66min\n", + "Epoch time: 1.7802 minutes. 500 epochs: 14.8350 hours.\n", + "Epochs: 298. Working time: 10.03 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy went to the park with her mom. the sky and saw .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "28640000 Examples seen. Accuracy: 0.8437 Error: 0.39570 Loss: 0.44604 Threads: 8 Forward time: 3.02s Backward time: 3.49s Step time: 35.02s\n", + "28672000 Examples seen. Accuracy: 0.8412 Error: 0.41660 Loss: 0.51691 Threads: 8 Forward time: 3.31s Backward time: 3.93s Step time: 35.43s\n", + "28704000 Examples seen. Accuracy: 0.8403 Error: 0.38324 Loss: 0.42371 Threads: 8 Forward time: 3.03s Backward time: 3.58s Step time: 35.20s\n", + "Starting Validation.\n", + "Epochs: 299 Examples seen:28704000 Validation Accuracy: 0.8141 Validation Error: 0.4821 Validation Loss: 0.6234 Total time: 603.47min\n", + "Epoch time: 1.7601 minutes. 500 epochs: 14.6675 hours.\n", + "Epochs: 299. Working time: 10.06 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big tr.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "28736000 Examples seen. Accuracy: 0.8414 Error: 0.46100 Loss: 0.56639 Threads: 8 Forward time: 3.53s Backward time: 4.36s Step time: 34.65s\n", + "28768000 Examples seen. 
Accuracy: 0.8400 Error: 0.44301 Loss: 0.54886 Threads: 8 Forward time: 3.26s Backward time: 3.99s Step time: 35.19s\n", + "28800000 Examples seen. Accuracy: 0.8391 Error: 0.41752 Loss: 0.48164 Threads: 8 Forward time: 3.24s Backward time: 3.74s Step time: 36.02s\n", + "Starting Validation.\n", + "Epochs: 300 Examples seen:28800000 Validation Accuracy: 0.8162 Validation Error: 0.4843 Validation Loss: 0.6200 Total time: 605.28min\n", + "Starting Testing.\n", + "Epochs: 300 Examples seen:28800000 Test Accuracy: 0.8162 Test Error: 0.4843 Test Loss: 0.6200 Total time: 605.31min\n", + "Epoch time: 1.8010 minutes. 500 epochs: 15.0083 hours.\n", + "Epochs: 300. Working time: 10.09 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park. she saw a big stread with her.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "28832000 Examples seen. Accuracy: 0.8379 Error: 0.37302 Loss: 0.43483 Threads: 8 Forward time: 3.26s Backward time: 4.01s Step time: 34.80s\n", + "28864000 Examples seen. Accuracy: 0.8407 Error: 0.35461 Loss: 0.40872 Threads: 8 Forward time: 3.50s Backward time: 4.07s Step time: 35.65s\n", + "28896000 Examples seen. Accuracy: 0.8395 Error: 0.48309 Loss: 0.57357 Threads: 8 Forward time: 3.19s Backward time: 3.88s Step time: 35.87s\n", + "Starting Validation.\n", + "Epochs: 301 Examples seen:28896000 Validation Accuracy: 0.8124 Validation Error: 0.4817 Validation Loss: 0.6195 Total time: 607.13min\n", + "Epoch time: 1.7936 minutes. 500 epochs: 14.9471 hours.\n", + "Epochs: 301. Working time: 10.12 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "28928000 Examples seen. Accuracy: 0.8419 Error: 0.46471 Loss: 0.52201 Threads: 8 Forward time: 3.08s Backward time: 3.75s Step time: 35.53s\n", + "28960000 Examples seen. Accuracy: 0.8403 Error: 0.46517 Loss: 0.51535 Threads: 8 Forward time: 3.26s Backward time: 3.94s Step time: 35.29s\n", + "28992000 Examples seen. Accuracy: 0.8396 Error: 0.42274 Loss: 0.47420 Threads: 8 Forward time: 3.41s Backward time: 4.25s Step time: 35.25s\n", + "Starting Validation.\n", + "Epochs: 302 Examples seen:28992000 Validation Accuracy: 0.8128 Validation Error: 0.4774 Validation Loss: 0.6148 Total time: 608.94min\n", + "Epoch time: 1.7624 minutes. 500 epochs: 14.6867 hours.\n", + "Epochs: 302. Working time: 10.15 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "29024000 Examples seen. Accuracy: 0.8411 Error: 0.36062 Loss: 0.39871 Threads: 8 Forward time: 3.50s Backward time: 4.32s Step time: 36.92s\n", + "29056000 Examples seen. Accuracy: 0.8381 Error: 0.40722 Loss: 0.51936 Threads: 8 Forward time: 3.80s Backward time: 4.61s Step time: 36.41s\n", + "29088000 Examples seen. Accuracy: 0.8395 Error: 0.45405 Loss: 0.52915 Threads: 8 Forward time: 3.74s Backward time: 4.65s Step time: 38.51s\n", + "Starting Validation.\n", + "Epochs: 303 Examples seen:29088000 Validation Accuracy: 0.8151 Validation Error: 0.4833 Validation Loss: 0.6207 Total time: 610.84min\n", + "Epoch time: 1.9255 minutes. 500 epochs: 16.0462 hours.\n", + "Epochs: 303. Working time: 10.18 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park. 
she saw a big box of three ye.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "29120000 Examples seen. Accuracy: 0.8391 Error: 0.44222 Loss: 0.51315 Threads: 8 Forward time: 3.69s Backward time: 4.57s Step time: 37.39s\n", + "29152000 Examples seen. Accuracy: 0.8395 Error: 0.44572 Loss: 0.49167 Threads: 8 Forward time: 3.16s Backward time: 3.86s Step time: 38.26s\n", + "29184000 Examples seen. Accuracy: 0.8508 Error: 0.41403 Loss: 0.48258 Threads: 8 Forward time: 3.28s Backward time: 4.02s Step time: 38.00s\n", + "Starting Validation.\n", + "Epochs: 304 Examples seen:29184000 Validation Accuracy: 0.8151 Validation Error: 0.4643 Validation Loss: 0.6240 Total time: 612.79min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.920 Min Weight: -0.957 Max Output: 0.726 Min Output: -0.743 TNNetPointwiseConv 81,1,32 Times: 0.19s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.726 Min Output: -0.743 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.656 Min Weight: -0.661 Max Output: 3.784 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.14s 0.07s Parent:2\n", + "Layer 4 Max Output: 3.784 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.784 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.351 Min Weight: -0.368 Max Output: 3.946 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.48s 0.27s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.227 Min Weight: -0.232 Max Output: 2.988 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.55s 0.44s Parent:6\n", + "Layer 8 Max Output: 2.988 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.00s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.127 Min Weight: -0.173 Max Output: 1.826 Min Output: 0.000 
TNNetPointwiseConvReLU 1,1,1024 Times: 0.76s 0.40s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.255 Min Weight: -0.202 Max Output: 3.994 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 0.996 Min Weight: -0.570 Max Output: 16.070 Min Output: -4.642 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.921 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.8999 minutes. 500 epochs: 15.8329 hours.\n", + "Epochs: 304. Working time: 10.21 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "29216000 Examples seen. Accuracy: 0.8527 Error: 0.39709 Loss: 0.46066 Threads: 8 Forward time: 3.45s Backward time: 4.29s Step time: 37.92s\n", + "29248000 Examples seen. Accuracy: 0.8479 Error: 0.39700 Loss: 0.44057 Threads: 8 Forward time: 3.16s Backward time: 3.83s Step time: 35.97s\n", + "29280000 Examples seen. Accuracy: 0.8438 Error: 0.43030 Loss: 0.55939 Threads: 8 Forward time: 3.41s Backward time: 4.18s Step time: 36.26s\n", + "Starting Validation.\n", + "Epochs: 305 Examples seen:29280000 Validation Accuracy: 0.8141 Validation Error: 0.4866 Validation Loss: 0.6222 Total time: 614.67min\n", + "Epoch time: 1.8130 minutes. 500 epochs: 15.1083 hours.\n", + "Epochs: 305. Working time: 10.24 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park. she saw a big box of the star.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "29312000 Examples seen. 
Accuracy: 0.8500 Error: 0.44162 Loss: 0.55355 Threads: 8 Forward time: 3.42s Backward time: 4.29s Step time: 36.56s\n", + "29344000 Examples seen. Accuracy: 0.8628 Error: 0.36424 Loss: 0.40443 Threads: 8 Forward time: 3.25s Backward time: 4.06s Step time: 37.11s\n", + "29376000 Examples seen. Accuracy: 0.8562 Error: 0.40131 Loss: 0.45807 Threads: 8 Forward time: 3.44s Backward time: 4.09s Step time: 36.45s\n", + "Starting Validation.\n", + "Epochs: 306 Examples seen:29376000 Validation Accuracy: 0.8107 Validation Error: 0.4797 Validation Loss: 0.6220 Total time: 616.55min\n", + "Epoch time: 1.8225 minutes. 500 epochs: 15.1879 hours.\n", + "Epochs: 306. Working time: 10.28 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. the sky and a smal.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "29408000 Examples seen. Accuracy: 0.8464 Error: 0.47518 Loss: 0.60080 Threads: 8 Forward time: 3.76s Backward time: 4.71s Step time: 36.39s\n", + "29440000 Examples seen. Accuracy: 0.8415 Error: 0.42831 Loss: 0.51733 Threads: 8 Forward time: 3.47s Backward time: 4.17s Step time: 37.19s\n", + "29472000 Examples seen. Accuracy: 0.8410 Error: 0.45372 Loss: 0.57659 Threads: 8 Forward time: 3.14s Backward time: 3.84s Step time: 36.62s\n", + "Starting Validation.\n", + "Epochs: 307 Examples seen:29472000 Validation Accuracy: 0.8122 Validation Error: 0.4789 Validation Loss: 0.6156 Total time: 618.43min\n", + "Epoch time: 1.8308 minutes. 500 epochs: 15.2567 hours.\n", + "Epochs: 307. Working time: 10.31 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named mia went to the park with her mom. the sky and saw a.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "29504000 Examples seen. Accuracy: 0.8422 Error: 0.33386 Loss: 0.39950 Threads: 8 Forward time: 3.24s Backward time: 3.91s Step time: 35.44s\n", + "29536000 Examples seen. Accuracy: 0.8522 Error: 0.45249 Loss: 0.50809 Threads: 8 Forward time: 3.45s Backward time: 4.34s Step time: 35.80s\n", + "29568000 Examples seen. Accuracy: 0.8589 Error: 0.39257 Loss: 0.43040 Threads: 8 Forward time: 3.27s Backward time: 3.96s Step time: 36.29s\n", + "Starting Validation.\n", + "Epochs: 308 Examples seen:29568000 Validation Accuracy: 0.8166 Validation Error: 0.4652 Validation Loss: 0.6149 Total time: 620.26min\n", + "Epoch time: 1.8146 minutes. 500 epochs: 15.1221 hours.\n", + "Epochs: 308. Working time: 10.34 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they wanted to go .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "29600000 Examples seen. Accuracy: 0.8643 Error: 0.40611 Loss: 0.48607 Threads: 8 Forward time: 3.40s Backward time: 4.37s Step time: 36.21s\n", + "29632000 Examples seen. Accuracy: 0.8571 Error: 0.40168 Loss: 0.51025 Threads: 8 Forward time: 3.45s Backward time: 4.30s Step time: 36.18s\n", + "29664000 Examples seen. Accuracy: 0.8526 Error: 0.43649 Loss: 0.51338 Threads: 8 Forward time: 3.54s Backward time: 4.49s Step time: 36.09s\n", + "Starting Validation.\n", + "Epochs: 309 Examples seen:29664000 Validation Accuracy: 0.8139 Validation Error: 0.4765 Validation Loss: 0.6192 Total time: 622.11min\n", + "Epoch time: 1.8046 minutes. 500 epochs: 15.0379 hours.\n", + "Epochs: 309. Working time: 10.37 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. 
they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "29696000 Examples seen. Accuracy: 0.8640 Error: 0.33215 Loss: 0.33215 Threads: 8 Forward time: 3.26s Backward time: 4.09s Step time: 35.92s\n", + "29728000 Examples seen. Accuracy: 0.8649 Error: 0.39324 Loss: 0.43411 Threads: 8 Forward time: 3.27s Backward time: 4.07s Step time: 36.01s\n", + "29760000 Examples seen. Accuracy: 0.8506 Error: 0.42290 Loss: 0.54921 Threads: 8 Forward time: 3.12s Backward time: 3.71s Step time: 35.76s\n", + "Starting Validation.\n", + "Epochs: 310 Examples seen:29760000 Validation Accuracy: 0.8149 Validation Error: 0.4730 Validation Loss: 0.6156 Total time: 623.95min\n", + "Starting Testing.\n", + "Epochs: 310 Examples seen:29760000 Test Accuracy: 0.8149 Test Error: 0.4730 Test Loss: 0.6156 Total time: 623.99min\n", + "Epoch time: 1.7879 minutes. 500 epochs: 14.8996 hours.\n", + "Epochs: 310. Working time: 10.40 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they were somethin.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "29792000 Examples seen. Accuracy: 0.8519 Error: 0.30748 Loss: 0.34494 Threads: 8 Forward time: 3.46s Backward time: 4.37s Step time: 35.51s\n", + "29824000 Examples seen. Accuracy: 0.8607 Error: 0.38823 Loss: 0.43482 Threads: 8 Forward time: 3.40s Backward time: 4.33s Step time: 35.58s\n", + "29856000 Examples seen. Accuracy: 0.8550 Error: 0.48106 Loss: 0.53786 Threads: 8 Forward time: 3.20s Backward time: 3.82s Step time: 35.31s\n", + "Starting Validation.\n", + "Epochs: 311 Examples seen:29856000 Validation Accuracy: 0.8143 Validation Error: 0.4740 Validation Loss: 0.6102 Total time: 625.80min\n", + "Epoch time: 1.7656 minutes. 
500 epochs: 14.7137 hours.\n", + "Epochs: 311. Working time: 10.43 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they were singing .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "29888000 Examples seen. Accuracy: 0.8447 Error: 0.41234 Loss: 0.45332 Threads: 8 Forward time: 3.41s Backward time: 4.22s Step time: 35.56s\n", + "29920000 Examples seen. Accuracy: 0.8419 Error: 0.43474 Loss: 0.47939 Threads: 8 Forward time: 3.19s Backward time: 3.81s Step time: 35.18s\n", + "29952000 Examples seen. Accuracy: 0.8403 Error: 0.41427 Loss: 0.51106 Threads: 8 Forward time: 3.85s Backward time: 4.62s Step time: 35.22s\n", + "Starting Validation.\n", + "Epochs: 312 Examples seen:29952000 Validation Accuracy: 0.8128 Validation Error: 0.4831 Validation Loss: 0.6220 Total time: 627.62min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.931 Min Weight: -0.962 Max Output: 0.731 Min Output: -0.745 TNNetPointwiseConv 81,1,32 Times: 0.22s 0.08s Parent:0\n", + "Layer 2 Max Output: 0.731 Min Output: -0.745 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.665 Min Weight: -0.666 Max Output: 3.862 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.16s 0.07s Parent:2\n", + "Layer 4 Max Output: 3.862 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.862 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.353 Min Weight: -0.367 Max Output: 4.171 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.46s 0.32s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.232 Min Weight: -0.231 Max Output: 3.077 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 
Times: 1.62s 0.51s Parent:6\n", + "Layer 8 Max Output: 3.077 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.130 Min Weight: -0.177 Max Output: 1.647 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.77s 0.44s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.256 Min Weight: -0.204 Max Output: 4.162 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.005 Min Weight: -0.575 Max Output: 17.425 Min Output: -4.371 TNNetFullConnectLinear 128,1,1 Times: 0.05s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.990 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.7610 minutes. 500 epochs: 14.6746 hours.\n", + "Epochs: 312. Working time: 10.46 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they were sharl .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "29984000 Examples seen. Accuracy: 0.8402 Error: 0.38540 Loss: 0.40853 Threads: 8 Forward time: 3.37s Backward time: 4.12s Step time: 36.29s\n", + "30016000 Examples seen. Accuracy: 0.8397 Error: 0.47621 Loss: 0.56140 Threads: 8 Forward time: 3.53s Backward time: 4.27s Step time: 34.84s\n", + "30048000 Examples seen. Accuracy: 0.8516 Error: 0.35499 Loss: 0.41653 Threads: 8 Forward time: 3.43s Backward time: 4.29s Step time: 35.02s\n", + "Starting Validation.\n", + "Epochs: 313 Examples seen:30048000 Validation Accuracy: 0.8128 Validation Error: 0.4551 Validation Loss: 0.6183 Total time: 629.43min\n", + "Epoch time: 1.7508 minutes. 500 epochs: 14.5896 hours.\n", + "Epochs: 313. Working time: 10.49 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "one day, a little boy named tim was playing in his backyard. he was so excited be.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "30080000 Examples seen. Accuracy: 0.8568 Error: 0.37305 Loss: 0.41088 Threads: 8 Forward time: 3.60s Backward time: 4.66s Step time: 35.71s\n", + "30112000 Examples seen. Accuracy: 0.8467 Error: 0.38916 Loss: 0.53592 Threads: 8 Forward time: 3.32s Backward time: 3.94s Step time: 35.05s\n", + "30144000 Examples seen. Accuracy: 0.8427 Error: 0.42041 Loss: 0.48451 Threads: 8 Forward time: 3.40s Backward time: 4.09s Step time: 34.29s\n", + "Starting Validation.\n", + "Epochs: 314 Examples seen:30144000 Validation Accuracy: 0.8180 Validation Error: 0.4752 Validation Loss: 0.6092 Total time: 631.22min\n", + "Epoch time: 1.7146 minutes. 500 epochs: 14.2883 hours.\n", + "Epochs: 314. Working time: 10.52 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big box.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "30176000 Examples seen. Accuracy: 0.8398 Error: 0.48955 Loss: 0.53533 Threads: 8 Forward time: 3.65s Backward time: 4.53s Step time: 34.94s\n", + "30208000 Examples seen. Accuracy: 0.8401 Error: 0.43519 Loss: 0.57103 Threads: 8 Forward time: 3.16s Backward time: 3.72s Step time: 35.57s\n", + "30240000 Examples seen. Accuracy: 0.8480 Error: 0.41379 Loss: 0.50381 Threads: 8 Forward time: 3.29s Backward time: 3.98s Step time: 34.51s\n", + "Starting Validation.\n", + "Epochs: 315 Examples seen:30240000 Validation Accuracy: 0.8187 Validation Error: 0.4619 Validation Loss: 0.6102 Total time: 633.02min\n", + "Epoch time: 1.7253 minutes. 500 epochs: 14.3779 hours.\n", + "Epochs: 315. 
Working time: 10.55 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they were going to.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "30272000 Examples seen. Accuracy: 0.8478 Error: 0.45306 Loss: 0.56385 Threads: 8 Forward time: 3.38s Backward time: 4.11s Step time: 36.32s\n", + "30304000 Examples seen. Accuracy: 0.8457 Error: 0.48742 Loss: 0.64388 Threads: 8 Forward time: 3.37s Backward time: 3.89s Step time: 35.43s\n", + "30336000 Examples seen. Accuracy: 0.8435 Error: 0.45453 Loss: 0.58422 Threads: 8 Forward time: 3.45s Backward time: 4.37s Step time: 35.22s\n", + "Starting Validation.\n", + "Epochs: 316 Examples seen:30336000 Validation Accuracy: 0.8193 Validation Error: 0.4715 Validation Loss: 0.6112 Total time: 634.84min\n", + "Epoch time: 1.7608 minutes. 500 epochs: 14.6733 hours.\n", + "Epochs: 316. Working time: 10.58 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom and dad. they wante.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "30368000 Examples seen. Accuracy: 0.8434 Error: 0.44791 Loss: 0.56256 Threads: 8 Forward time: 2.98s Backward time: 3.53s Step time: 35.93s\n", + "30400000 Examples seen. Accuracy: 0.8415 Error: 0.46155 Loss: 0.56077 Threads: 8 Forward time: 3.12s Backward time: 3.58s Step time: 35.34s\n", + "30432000 Examples seen. Accuracy: 0.8416 Error: 0.41167 Loss: 0.48998 Threads: 8 Forward time: 3.40s Backward time: 4.12s Step time: 35.72s\n", + "Starting Validation.\n", + "VALIDATION RECORD! 
Saving NN at autosave.nn\n", + "Epochs: 317 Examples seen:30432000 Validation Accuracy: 0.8210 Validation Error: 0.4800 Validation Loss: 0.6025 Total time: 636.71min\n", + "Epoch time: 1.7862 minutes. 500 epochs: 14.8846 hours.\n", + "Epochs: 317. Working time: 10.61 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside w.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big box.\n", + "once upon a time, there was a little girl named lily. she loved to play outside w.\n", + "Max prediction pos is: 81\n", + "30464000 Examples seen. Accuracy: 0.8405 Error: 0.39881 Loss: 0.47241 Threads: 8 Forward time: 3.62s Backward time: 4.47s Step time: 35.58s\n", + "30496000 Examples seen. Accuracy: 0.8427 Error: 0.41172 Loss: 0.50189 Threads: 8 Forward time: 3.32s Backward time: 3.96s Step time: 35.30s\n", + "30528000 Examples seen. Accuracy: 0.8400 Error: 0.45736 Loss: 0.53374 Threads: 8 Forward time: 3.15s Backward time: 3.78s Step time: 35.68s\n", + "Starting Validation.\n", + "Epochs: 318 Examples seen:30528000 Validation Accuracy: 0.8195 Validation Error: 0.4774 Validation Loss: 0.6017 Total time: 638.53min\n", + "Epoch time: 1.7842 minutes. 500 epochs: 14.8687 hours.\n", + "Epochs: 318. Working time: 10.64 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big sli.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "30560000 Examples seen. Accuracy: 0.8516 Error: 0.37557 Loss: 0.40521 Threads: 8 Forward time: 3.57s Backward time: 4.49s Step time: 35.94s\n", + "30592000 Examples seen. Accuracy: 0.8482 Error: 0.45820 Loss: 0.53534 Threads: 8 Forward time: 3.11s Backward time: 3.78s Step time: 35.73s\n", + "30624000 Examples seen. 
Accuracy: 0.8428 Error: 0.44136 Loss: 0.49933 Threads: 8 Forward time: 3.52s Backward time: 4.24s Step time: 37.30s\n", + "Starting Validation.\n", + "Epochs: 319 Examples seen:30624000 Validation Accuracy: 0.8205 Validation Error: 0.4765 Validation Loss: 0.6054 Total time: 640.39min\n", + "Epoch time: 1.8650 minutes. 500 epochs: 15.5417 hours.\n", + "Epochs: 319. Working time: 10.67 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they were singing .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "30656000 Examples seen. Accuracy: 0.8435 Error: 0.39853 Loss: 0.48591 Threads: 8 Forward time: 3.21s Backward time: 3.84s Step time: 35.83s\n", + "30688000 Examples seen. Accuracy: 0.8423 Error: 0.40335 Loss: 0.48415 Threads: 8 Forward time: 3.34s Backward time: 3.99s Step time: 36.21s\n", + "30720000 Examples seen. 
Accuracy: 0.8537 Error: 0.33275 Loss: 0.38483 Threads: 8 Forward time: 3.48s Backward time: 4.33s Step time: 35.92s\n", + "Starting Validation.\n", + "Epochs: 320 Examples seen:30720000 Validation Accuracy: 0.8187 Validation Error: 0.4520 Validation Loss: 0.6141 Total time: 642.24min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.927 Min Weight: -0.964 Max Output: 0.729 Min Output: -0.746 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.729 Min Output: -0.746 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.663 Min Weight: -0.669 Max Output: 3.860 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.15s 0.08s Parent:2\n", + "Layer 4 Max Output: 3.860 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.00s Parent:3\n", + "Layer 5 Max Output: 3.860 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.357 Min Weight: -0.365 Max Output: 4.064 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.38s 0.27s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.231 Min Weight: -0.233 Max Output: 3.177 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.65s 0.46s Parent:6\n", + "Layer 8 Max Output: 3.177 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.130 Min Weight: -0.175 Max Output: 2.006 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.76s 0.43s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.257 Min Weight: -0.206 Max Output: 4.648 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.004 Min Weight: -0.579 Max Output: 19.012 Min Output: -5.066 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.984 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Starting Testing.\n", + "Epochs: 320 Examples seen:30720000 Test Accuracy: 0.8187 Test Error: 0.4520 Test Loss: 0.6141 Total time: 642.27min\n", + "Epoch time: 1.7959 minutes. 500 epochs: 14.9654 hours.\n", + "Epochs: 320. Working time: 10.70 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. tim saw a big saw .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "30752000 Examples seen. Accuracy: 0.8546 Error: 0.34873 Loss: 0.39732 Threads: 8 Forward time: 3.26s Backward time: 3.86s Step time: 35.49s\n", + "30784000 Examples seen. Accuracy: 0.8528 Error: 0.38690 Loss: 0.39001 Threads: 8 Forward time: 3.14s Backward time: 3.79s Step time: 35.16s\n", + "30816000 Examples seen. Accuracy: 0.8653 Error: 0.42802 Loss: 0.58748 Threads: 8 Forward time: 3.35s Backward time: 4.25s Step time: 36.60s\n", + "Starting Validation.\n", + "Epochs: 321 Examples seen:30816000 Validation Accuracy: 0.8147 Validation Error: 0.4595 Validation Loss: 0.6173 Total time: 644.10min\n", + "Epoch time: 1.8298 minutes. 500 epochs: 15.2483 hours.\n", + "Epochs: 321. Working time: 10.74 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim was playing in the park. he saw a big box on the .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "30848000 Examples seen. Accuracy: 0.8581 Error: 0.41219 Loss: 0.58270 Threads: 8 Forward time: 3.28s Backward time: 4.12s Step time: 35.51s\n", + "30880000 Examples seen. Accuracy: 0.8577 Error: 0.34352 Loss: 0.35718 Threads: 8 Forward time: 3.36s Backward time: 4.40s Step time: 36.73s\n", + "30912000 Examples seen. 
Accuracy: 0.8591 Error: 0.38361 Loss: 0.44345 Threads: 8 Forward time: 3.47s Backward time: 4.25s Step time: 36.13s\n", + "Starting Validation.\n", + "Epochs: 322 Examples seen:30912000 Validation Accuracy: 0.8166 Validation Error: 0.4652 Validation Loss: 0.6049 Total time: 645.95min\n", + "Epoch time: 1.8066 minutes. 500 epochs: 15.0550 hours.\n", + "Epochs: 322. Working time: 10.77 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little boy named tim went for a walk in the park. he saw a big tree wi.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "30944000 Examples seen. Accuracy: 0.8495 Error: 0.43235 Loss: 0.54454 Threads: 8 Forward time: 3.22s Backward time: 3.72s Step time: 35.19s\n", + "30976000 Examples seen. Accuracy: 0.8547 Error: 0.42476 Loss: 0.54169 Threads: 8 Forward time: 3.63s Backward time: 4.57s Step time: 35.43s\n", + "31008000 Examples seen. Accuracy: 0.8504 Error: 0.40524 Loss: 0.48688 Threads: 8 Forward time: 3.19s Backward time: 3.87s Step time: 35.59s\n", + "Starting Validation.\n", + "Epochs: 323 Examples seen:31008000 Validation Accuracy: 0.8172 Validation Error: 0.4658 Validation Loss: 0.6076 Total time: 647.76min\n", + "Epoch time: 1.7796 minutes. 500 epochs: 14.8304 hours.\n", + "Epochs: 323. Working time: 10.80 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "31040000 Examples seen. Accuracy: 0.8458 Error: 0.44945 Loss: 0.52581 Threads: 8 Forward time: 3.17s Backward time: 3.81s Step time: 35.64s\n", + "31072000 Examples seen. 
Accuracy: 0.8458 Error: 0.31860 Loss: 0.30496 Threads: 8 Forward time: 3.25s Backward time: 3.94s Step time: 36.11s\n", + "31104000 Examples seen. Accuracy: 0.8591 Error: 0.39054 Loss: 0.43376 Threads: 8 Forward time: 3.48s Backward time: 4.27s Step time: 36.85s\n", + "Starting Validation.\n", + "Epochs: 324 Examples seen:31104000 Validation Accuracy: 0.8174 Validation Error: 0.4641 Validation Loss: 0.6072 Total time: 649.62min\n", + "Epoch time: 1.8423 minutes. 500 epochs: 15.3529 hours.\n", + "Epochs: 324. Working time: 10.83 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "31136000 Examples seen. Accuracy: 0.8521 Error: 0.48052 Loss: 0.57112 Threads: 8 Forward time: 3.41s Backward time: 4.22s Step time: 36.27s\n", + "31168000 Examples seen. Accuracy: 0.8468 Error: 0.40234 Loss: 0.45708 Threads: 8 Forward time: 3.42s Backward time: 4.07s Step time: 35.58s\n", + "31200000 Examples seen. Accuracy: 0.8423 Error: 0.40308 Loss: 0.51380 Threads: 8 Forward time: 3.07s Backward time: 3.59s Step time: 35.32s\n", + "Starting Validation.\n", + "Epochs: 325 Examples seen:31200000 Validation Accuracy: 0.8128 Validation Error: 0.4782 Validation Loss: 0.6160 Total time: 651.45min\n", + "Epoch time: 1.7662 minutes. 500 epochs: 14.7179 hours.\n", + "Epochs: 325. Working time: 10.86 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they wanted to g.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "31232000 Examples seen. 
Accuracy: 0.8423 Error: 0.35472 Loss: 0.39743 Threads: 8 Forward time: 3.04s Backward time: 3.54s Step time: 35.03s\n", + "31264000 Examples seen. Accuracy: 0.8378 Error: 0.57749 Loss: 0.76065 Threads: 8 Forward time: 3.39s Backward time: 4.25s Step time: 35.15s\n", + "31296000 Examples seen. Accuracy: 0.8394 Error: 0.35235 Loss: 0.44621 Threads: 8 Forward time: 3.41s Backward time: 4.33s Step time: 36.18s\n", + "Starting Validation.\n", + "Epochs: 326 Examples seen:31296000 Validation Accuracy: 0.8191 Validation Error: 0.4692 Validation Loss: 0.6005 Total time: 653.27min\n", + "Epoch time: 1.8089 minutes. 500 epochs: 15.0742 hours.\n", + "Epochs: 326. Working time: 10.89 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "31328000 Examples seen. Accuracy: 0.8393 Error: 0.45757 Loss: 0.51228 Threads: 8 Forward time: 3.26s Backward time: 3.88s Step time: 34.98s\n", + "31360000 Examples seen. Accuracy: 0.8524 Error: 0.40134 Loss: 0.51236 Threads: 8 Forward time: 3.15s Backward time: 3.78s Step time: 35.97s\n", + "31392000 Examples seen. Accuracy: 0.8531 Error: 0.39175 Loss: 0.44438 Threads: 8 Forward time: 3.16s Backward time: 3.78s Step time: 34.97s\n", + "Starting Validation.\n", + "Epochs: 327 Examples seen:31392000 Validation Accuracy: 0.8170 Validation Error: 0.4678 Validation Loss: 0.6112 Total time: 655.08min\n", + "Epoch time: 1.7486 minutes. 500 epochs: 14.5713 hours.\n", + "Epochs: 327. Working time: 10.92 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "Max prediction pos is: 81\n", + "31424000 Examples seen. Accuracy: 0.8478 Error: 0.43081 Loss: 0.47756 Threads: 8 Forward time: 3.11s Backward time: 3.74s Step time: 35.26s\n", + "31456000 Examples seen. Accuracy: 0.8441 Error: 0.44572 Loss: 0.54063 Threads: 8 Forward time: 3.17s Backward time: 3.77s Step time: 36.16s\n", + "31488000 Examples seen. Accuracy: 0.8424 Error: 0.42479 Loss: 0.51551 Threads: 8 Forward time: 3.31s Backward time: 4.02s Step time: 36.13s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 328 Examples seen:31488000 Validation Accuracy: 0.8216 Validation Error: 0.4786 Validation Loss: 0.6133 Total time: 656.96min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.929 Min Weight: -0.967 Max Output: 0.730 Min Output: -0.748 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.10s Parent:0\n", + "Layer 2 Max Output: 0.730 Min Output: -0.748 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.665 Min Weight: -0.674 Max Output: 3.884 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.15s 0.07s Parent:2\n", + "Layer 4 Max Output: 3.884 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.884 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.359 Min Weight: -0.373 Max Output: 4.407 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.39s 0.28s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.235 Min Weight: -0.235 Max Output: 3.089 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.59s 0.42s Parent:6\n", + "Layer 8 Max Output: 3.089 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.133 Min Weight: -0.180 Max Output: 1.885 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 
0.75s 0.38s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.259 Min Weight: -0.206 Max Output: 4.099 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.04s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.009 Min Weight: -0.582 Max Output: 15.909 Min Output: -3.970 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.982 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.8067 minutes. 500 epochs: 15.0558 hours.\n", + "Epochs: 328. Working time: 10.95 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named sally was walking in the park. she saw a big party w.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "31520000 Examples seen. Accuracy: 0.8412 Error: 0.40605 Loss: 0.55769 Threads: 8 Forward time: 3.56s Backward time: 4.27s Step time: 35.59s\n", + "31552000 Examples seen. Accuracy: 0.8434 Error: 0.44225 Loss: 0.59389 Threads: 8 Forward time: 3.49s Backward time: 4.28s Step time: 36.28s\n", + "31584000 Examples seen. Accuracy: 0.8399 Error: 0.43077 Loss: 0.58792 Threads: 8 Forward time: 3.73s Backward time: 4.44s Step time: 36.40s\n", + "Starting Validation.\n", + "Epochs: 329 Examples seen:31584000 Validation Accuracy: 0.8136 Validation Error: 0.4782 Validation Loss: 0.6100 Total time: 658.81min\n", + "Epoch time: 1.8199 minutes. 500 epochs: 15.1654 hours.\n", + "Epochs: 329. Working time: 10.98 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lucy was walking in the park. she saw a big box with.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "31616000 Examples seen. 
Accuracy: 0.8408 Error: 0.43987 Loss: 0.46968 Threads: 8 Forward time: 3.40s Backward time: 4.16s Step time: 36.29s\n", + "31648000 Examples seen. Accuracy: 0.8545 Error: 0.36066 Loss: 0.46598 Threads: 8 Forward time: 3.20s Backward time: 3.81s Step time: 36.41s\n", + "31680000 Examples seen. Accuracy: 0.8471 Error: 0.44175 Loss: 0.58741 Threads: 8 Forward time: 3.20s Backward time: 3.90s Step time: 35.39s\n", + "Starting Validation.\n", + "Epochs: 330 Examples seen:31680000 Validation Accuracy: 0.8195 Validation Error: 0.4689 Validation Loss: 0.5959 Total time: 660.66min\n", + "Starting Testing.\n", + "Epochs: 330 Examples seen:31680000 Test Accuracy: 0.8195 Test Error: 0.4689 Test Loss: 0.5959 Total time: 660.69min\n", + "Epoch time: 1.7697 minutes. 500 epochs: 14.7471 hours.\n", + "Epochs: 330. Working time: 11.01 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park. she saw a big tree with her m.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "31712000 Examples seen. Accuracy: 0.8428 Error: 0.42675 Loss: 0.54104 Threads: 8 Forward time: 3.33s Backward time: 4.10s Step time: 35.36s\n", + "31744000 Examples seen. Accuracy: 0.8433 Error: 0.43629 Loss: 0.52877 Threads: 8 Forward time: 3.33s Backward time: 3.86s Step time: 35.76s\n", + "31776000 Examples seen. Accuracy: 0.8435 Error: 0.41276 Loss: 0.52477 Threads: 8 Forward time: 3.27s Backward time: 4.02s Step time: 34.89s\n", + "Starting Validation.\n", + "Epochs: 331 Examples seen:31776000 Validation Accuracy: 0.8207 Validation Error: 0.4670 Validation Loss: 0.6099 Total time: 662.50min\n", + "Epoch time: 1.7444 minutes. 500 epochs: 14.5371 hours.\n", + "Epochs: 331. Working time: 11.04 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little girl named lily went to the park. she saw a big sister with her.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "31808000 Examples seen. Accuracy: 0.8452 Error: 0.48057 Loss: 0.58338 Threads: 8 Forward time: 3.26s Backward time: 3.96s Step time: 34.44s\n", + "31840000 Examples seen. Accuracy: 0.8453 Error: 0.36342 Loss: 0.42165 Threads: 8 Forward time: 3.25s Backward time: 3.93s Step time: 34.46s\n", + "31872000 Examples seen. Accuracy: 0.8649 Error: 0.30408 Loss: 0.29509 Threads: 8 Forward time: 3.43s Backward time: 4.24s Step time: 35.69s\n", + "Starting Validation.\n", + "Epochs: 332 Examples seen:31872000 Validation Accuracy: 0.8134 Validation Error: 0.4576 Validation Loss: 0.6152 Total time: 664.29min\n", + "Epoch time: 1.7843 minutes. 500 epochs: 14.8692 hours.\n", + "Epochs: 332. Working time: 11.07 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. the sky and the sk.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "31904000 Examples seen. Accuracy: 0.8578 Error: 0.37647 Loss: 0.39485 Threads: 8 Forward time: 3.37s Backward time: 4.09s Step time: 35.51s\n", + "31936000 Examples seen. Accuracy: 0.8453 Error: 0.41940 Loss: 0.49319 Threads: 8 Forward time: 3.23s Backward time: 3.84s Step time: 35.59s\n", + "31968000 Examples seen. Accuracy: 0.8438 Error: 0.47855 Loss: 0.59740 Threads: 8 Forward time: 3.45s Backward time: 4.19s Step time: 34.49s\n", + "Starting Validation.\n", + "Epochs: 333 Examples seen:31968000 Validation Accuracy: 0.8170 Validation Error: 0.4859 Validation Loss: 0.6118 Total time: 666.09min\n", + "Epoch time: 1.7247 minutes. 500 epochs: 14.3725 hours.\n", + "Epochs: 333. 
Working time: 11.10 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named amy was walking in the park. she saw a big slide and.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "32000000 Examples seen. Accuracy: 0.8422 Error: 0.43343 Loss: 0.51638 Threads: 8 Forward time: 3.09s Backward time: 3.58s Step time: 34.95s\n", + "32032000 Examples seen. Accuracy: 0.8411 Error: 0.44539 Loss: 0.53552 Threads: 8 Forward time: 3.10s Backward time: 3.54s Step time: 35.06s\n", + "32064000 Examples seen. Accuracy: 0.8412 Error: 0.48315 Loss: 0.56960 Threads: 8 Forward time: 3.28s Backward time: 3.85s Step time: 34.83s\n", + "Starting Validation.\n", + "Epochs: 334 Examples seen:32064000 Validation Accuracy: 0.8168 Validation Error: 0.4754 Validation Loss: 0.5999 Total time: 667.88min\n", + "Epoch time: 1.7416 minutes. 500 epochs: 14.5133 hours.\n", + "Epochs: 334. Working time: 11.13 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went for a walk. he saw a big castle on the sky w.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "32096000 Examples seen. Accuracy: 0.8415 Error: 0.42686 Loss: 0.58087 Threads: 8 Forward time: 3.25s Backward time: 3.86s Step time: 34.88s\n", + "32128000 Examples seen. Accuracy: 0.8528 Error: 0.39572 Loss: 0.46921 Threads: 8 Forward time: 3.32s Backward time: 4.16s Step time: 35.29s\n", + "32160000 Examples seen. 
Accuracy: 0.8598 Error: 0.41924 Loss: 0.47464 Threads: 8 Forward time: 3.39s Backward time: 4.06s Step time: 37.15s\n", + "Starting Validation.\n", + "Epochs: 335 Examples seen:32160000 Validation Accuracy: 0.8199 Validation Error: 0.4625 Validation Loss: 0.6060 Total time: 669.72min\n", + "Epoch time: 1.8575 minutes. 500 epochs: 15.4788 hours.\n", + "Epochs: 335. Working time: 11.16 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "32192000 Examples seen. Accuracy: 0.8594 Error: 0.29219 Loss: 0.27725 Threads: 8 Forward time: 3.47s Backward time: 4.49s Step time: 37.09s\n", + "32224000 Examples seen. Accuracy: 0.8685 Error: 0.32174 Loss: 0.35320 Threads: 8 Forward time: 3.16s Backward time: 3.86s Step time: 36.22s\n", + "32256000 Examples seen. 
Accuracy: 0.8674 Error: 0.36794 Loss: 0.44957 Threads: 8 Forward time: 3.08s Backward time: 3.60s Step time: 35.91s\n", + "Starting Validation.\n", + "Epochs: 336 Examples seen:32256000 Validation Accuracy: 0.8197 Validation Error: 0.4508 Validation Loss: 0.6174 Total time: 671.58min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.935 Min Weight: -0.970 Max Output: 0.733 Min Output: -0.749 TNNetPointwiseConv 81,1,32 Times: 0.21s 0.08s Parent:0\n", + "Layer 2 Max Output: 0.733 Min Output: -0.749 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.672 Min Weight: -0.682 Max Output: 3.971 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.12s 0.07s Parent:2\n", + "Layer 4 Max Output: 3.971 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.971 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.364 Min Weight: -0.378 Max Output: 4.169 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.37s 0.22s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.237 Min Weight: -0.236 Max Output: 3.268 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.59s 0.39s Parent:6\n", + "Layer 8 Max Output: 3.268 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.08s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.136 Min Weight: -0.180 Max Output: 1.893 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.74s 0.37s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.260 Min Weight: -0.207 Max Output: 4.975 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.04s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.013 Min Weight: -0.588 Max Output: 20.012 Min Output: -5.130 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.996 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.7956 minutes. 500 epochs: 14.9633 hours.\n", + "Epochs: 336. Working time: 11.19 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "32288000 Examples seen. Accuracy: 0.8562 Error: 0.38766 Loss: 0.46223 Threads: 8 Forward time: 3.40s Backward time: 4.20s Step time: 36.76s\n", + "32320000 Examples seen. Accuracy: 0.8465 Error: 0.42641 Loss: 0.53919 Threads: 8 Forward time: 3.17s Backward time: 3.75s Step time: 35.31s\n", + "32352000 Examples seen. Accuracy: 0.8448 Error: 0.43651 Loss: 0.52428 Threads: 8 Forward time: 3.15s Backward time: 3.71s Step time: 35.33s\n", + "Starting Validation.\n", + "Epochs: 337 Examples seen:32352000 Validation Accuracy: 0.8168 Validation Error: 0.4757 Validation Loss: 0.6056 Total time: 673.42min\n", + "Epoch time: 1.7667 minutes. 500 epochs: 14.7229 hours.\n", + "Epochs: 337. Working time: 11.22 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park. she was very excited to go to.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "32384000 Examples seen. Accuracy: 0.8464 Error: 0.38944 Loss: 0.38447 Threads: 8 Forward time: 3.28s Backward time: 3.95s Step time: 34.67s\n", + "32416000 Examples seen. Accuracy: 0.8588 Error: 0.40473 Loss: 0.52539 Threads: 8 Forward time: 3.22s Backward time: 3.92s Step time: 34.71s\n", + "32448000 Examples seen. 
Accuracy: 0.8535 Error: 0.32912 Loss: 0.35963 Threads: 8 Forward time: 3.23s Backward time: 3.95s Step time: 35.27s\n", + "Starting Validation.\n", + "Epochs: 338 Examples seen:32448000 Validation Accuracy: 0.8193 Validation Error: 0.4617 Validation Loss: 0.5935 Total time: 675.21min\n", + "Epoch time: 1.7633 minutes. 500 epochs: 14.6942 hours.\n", + "Epochs: 338. Working time: 11.25 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she saw a big bo.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "32480000 Examples seen. Accuracy: 0.8599 Error: 0.40087 Loss: 0.46354 Threads: 8 Forward time: 3.56s Backward time: 4.36s Step time: 35.81s\n", + "32512000 Examples seen. Accuracy: 0.8496 Error: 0.41736 Loss: 0.51416 Threads: 8 Forward time: 3.57s Backward time: 4.45s Step time: 34.77s\n", + "32544000 Examples seen. Accuracy: 0.8511 Error: 0.30812 Loss: 0.30691 Threads: 8 Forward time: 3.29s Backward time: 4.14s Step time: 34.48s\n", + "Starting Validation.\n", + "Epochs: 339 Examples seen:32544000 Validation Accuracy: 0.8174 Validation Error: 0.4540 Validation Loss: 0.6080 Total time: 677.00min\n", + "Epoch time: 1.7239 minutes. 500 epochs: 14.3654 hours.\n", + "Epochs: 339. Working time: 11.28 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. the sun was shin.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "32576000 Examples seen. Accuracy: 0.8613 Error: 0.38218 Loss: 0.46930 Threads: 8 Forward time: 3.21s Backward time: 3.75s Step time: 36.27s\n", + "32608000 Examples seen. 
Accuracy: 0.8620 Error: 0.37101 Loss: 0.36894 Threads: 8 Forward time: 3.28s Backward time: 4.06s Step time: 35.39s\n", + "32640000 Examples seen. Accuracy: 0.8647 Error: 0.29669 Loss: 0.38879 Threads: 8 Forward time: 3.52s Backward time: 4.45s Step time: 35.15s\n", + "Starting Validation.\n", + "Epochs: 340 Examples seen:32640000 Validation Accuracy: 0.8124 Validation Error: 0.4708 Validation Loss: 0.6101 Total time: 678.82min\n", + "Starting Testing.\n", + "Epochs: 340 Examples seen:32640000 Test Accuracy: 0.8124 Test Error: 0.4708 Test Loss: 0.6101 Total time: 678.86min\n", + "Epoch time: 1.7574 minutes. 500 epochs: 14.6450 hours.\n", + "Epochs: 340. Working time: 11.31 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went for a walk with his mothing to the park. he .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "32672000 Examples seen. Accuracy: 0.8565 Error: 0.38125 Loss: 0.45432 Threads: 8 Forward time: 3.03s Backward time: 3.64s Step time: 34.72s\n", + "32704000 Examples seen. Accuracy: 0.8477 Error: 0.46837 Loss: 0.60764 Threads: 8 Forward time: 3.09s Backward time: 3.85s Step time: 34.42s\n", + "32736000 Examples seen. Accuracy: 0.8452 Error: 0.40962 Loss: 0.46290 Threads: 8 Forward time: 3.38s Backward time: 4.06s Step time: 34.07s\n", + "Starting Validation.\n", + "Epochs: 341 Examples seen:32736000 Validation Accuracy: 0.8199 Validation Error: 0.4620 Validation Loss: 0.5958 Total time: 680.62min\n", + "Epoch time: 1.7034 minutes. 500 epochs: 14.1950 hours.\n", + "Epochs: 341. Working time: 11.34 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named lucy wanted to go outside and sara. she saw a big bo.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "32768000 Examples seen. Accuracy: 0.8583 Error: 0.31749 Loss: 0.30075 Threads: 8 Forward time: 3.14s Backward time: 3.86s Step time: 34.68s\n", + "32800000 Examples seen. Accuracy: 0.8625 Error: 0.39385 Loss: 0.46769 Threads: 8 Forward time: 3.20s Backward time: 3.83s Step time: 34.90s\n", + "32832000 Examples seen. Accuracy: 0.8486 Error: 0.41533 Loss: 0.49209 Threads: 8 Forward time: 3.14s Backward time: 3.61s Step time: 35.51s\n", + "Starting Validation.\n", + "Epochs: 342 Examples seen:32832000 Validation Accuracy: 0.8172 Validation Error: 0.4702 Validation Loss: 0.6066 Total time: 682.42min\n", + "Epoch time: 1.7755 minutes. 500 epochs: 14.7954 hours.\n", + "Epochs: 342. Working time: 11.37 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "32864000 Examples seen. Accuracy: 0.8433 Error: 0.48070 Loss: 0.59728 Threads: 8 Forward time: 3.29s Backward time: 3.96s Step time: 34.21s\n", + "32896000 Examples seen. Accuracy: 0.8417 Error: 0.49576 Loss: 0.65170 Threads: 8 Forward time: 3.14s Backward time: 3.71s Step time: 34.26s\n", + "32928000 Examples seen. Accuracy: 0.8430 Error: 0.40871 Loss: 0.44147 Threads: 8 Forward time: 3.52s Backward time: 4.31s Step time: 34.66s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 343 Examples seen:32928000 Validation Accuracy: 0.8222 Validation Error: 0.4684 Validation Loss: 0.6032 Total time: 684.22min\n", + "Epoch time: 1.7329 minutes. 500 epochs: 14.4408 hours.\n", + "Epochs: 343. Working time: 11.40 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little girl named lily went outside to play. she saw a big brocks was .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "32960000 Examples seen. Accuracy: 0.8425 Error: 0.42150 Loss: 0.51670 Threads: 8 Forward time: 3.25s Backward time: 3.90s Step time: 34.89s\n", + "32992000 Examples seen. Accuracy: 0.8439 Error: 0.45095 Loss: 0.56890 Threads: 8 Forward time: 3.19s Backward time: 3.88s Step time: 35.27s\n", + "33024000 Examples seen. Accuracy: 0.8453 Error: 0.40274 Loss: 0.49183 Threads: 8 Forward time: 3.26s Backward time: 3.83s Step time: 34.88s\n", + "Starting Validation.\n", + "Epochs: 344 Examples seen:33024000 Validation Accuracy: 0.8222 Validation Error: 0.4732 Validation Loss: 0.6052 Total time: 686.02min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.936 Min Weight: -0.971 Max Output: 0.733 Min Output: -0.749 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.08s Parent:0\n", + "Layer 2 Max Output: 0.733 Min Output: -0.749 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.678 Min Weight: -0.684 Max Output: 3.942 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.14s 0.07s Parent:2\n", + "Layer 4 Max Output: 3.942 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 3.942 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.362 Min Weight: -0.379 Max Output: 4.297 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.36s 0.28s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.236 Min Weight: -0.240 Max Output: 3.096 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.58s 0.39s Parent:6\n", + "Layer 8 Max Output: 3.096 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.06s 0.00s Parent:7\n", + "Layer 9 
Neurons:1024 Max Weight: 0.135 Min Weight: -0.181 Max Output: 1.899 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.72s 0.39s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.260 Min Weight: -0.207 Max Output: 3.766 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.015 Min Weight: -0.592 Max Output: 16.032 Min Output: -4.231 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.942 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.7441 minutes. 500 epochs: 14.5342 hours.\n", + "Epochs: 344. Working time: 11.43 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named mary was playing in her backyard. she saw a big bask.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "33056000 Examples seen. Accuracy: 0.8539 Error: 0.38733 Loss: 0.47607 Threads: 8 Forward time: 3.15s Backward time: 3.92s Step time: 34.77s\n", + "33088000 Examples seen. Accuracy: 0.8690 Error: 0.41164 Loss: 0.47776 Threads: 8 Forward time: 3.54s Backward time: 4.30s Step time: 35.98s\n", + "33120000 Examples seen. Accuracy: 0.8636 Error: 0.44301 Loss: 0.55532 Threads: 8 Forward time: 3.32s Backward time: 4.23s Step time: 35.31s\n", + "Starting Validation.\n", + "Epochs: 345 Examples seen:33120000 Validation Accuracy: 0.8205 Validation Error: 0.4613 Validation Loss: 0.6054 Total time: 687.83min\n", + "Epoch time: 1.7654 minutes. 500 epochs: 14.7121 hours.\n", + "Epochs: 345. Working time: 11.46 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she saw a big bo.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "Max prediction pos is: 81\n", + "33152000 Examples seen. Accuracy: 0.8668 Error: 0.39671 Loss: 0.47637 Threads: 8 Forward time: 3.24s Backward time: 3.86s Step time: 34.67s\n", + "33184000 Examples seen. Accuracy: 0.8582 Error: 0.39165 Loss: 0.45252 Threads: 8 Forward time: 3.17s Backward time: 3.84s Step time: 34.62s\n", + "33216000 Examples seen. Accuracy: 0.8503 Error: 0.45566 Loss: 0.54360 Threads: 8 Forward time: 3.14s Backward time: 3.75s Step time: 34.77s\n", + "Starting Validation.\n", + "Epochs: 346 Examples seen:33216000 Validation Accuracy: 0.8214 Validation Error: 0.4724 Validation Loss: 0.6039 Total time: 689.61min\n", + "Epoch time: 1.7387 minutes. 500 epochs: 14.4892 hours.\n", + "Epochs: 346. Working time: 11.49 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they wanted to go .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "33248000 Examples seen. Accuracy: 0.8418 Error: 0.39980 Loss: 0.43370 Threads: 8 Forward time: 3.21s Backward time: 3.73s Step time: 35.77s\n", + "33280000 Examples seen. Accuracy: 0.8418 Error: 0.40680 Loss: 0.50677 Threads: 8 Forward time: 3.16s Backward time: 3.48s Step time: 34.93s\n", + "33312000 Examples seen. Accuracy: 0.8423 Error: 0.49424 Loss: 0.56280 Threads: 8 Forward time: 3.24s Backward time: 3.88s Step time: 35.33s\n", + "Starting Validation.\n", + "Epochs: 347 Examples seen:33312000 Validation Accuracy: 0.8145 Validation Error: 0.4848 Validation Loss: 0.6062 Total time: 691.42min\n", + "Epoch time: 1.7667 minutes. 500 epochs: 14.7221 hours.\n", + "Epochs: 347. Working time: 11.52 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. 
the started to g.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "33344000 Examples seen. Accuracy: 0.8558 Error: 0.36320 Loss: 0.39456 Threads: 8 Forward time: 3.28s Backward time: 4.07s Step time: 34.36s\n", + "33376000 Examples seen. Accuracy: 0.8605 Error: 0.33620 Loss: 0.36036 Threads: 8 Forward time: 3.23s Backward time: 3.87s Step time: 35.30s\n", + "33408000 Examples seen. Accuracy: 0.8605 Error: 0.40973 Loss: 0.49059 Threads: 8 Forward time: 3.17s Backward time: 3.81s Step time: 35.64s\n", + "Starting Validation.\n", + "Epochs: 348 Examples seen:33408000 Validation Accuracy: 0.8199 Validation Error: 0.4500 Validation Loss: 0.6061 Total time: 693.22min\n", + "Epoch time: 1.7820 minutes. 500 epochs: 14.8504 hours.\n", + "Epochs: 348. Working time: 11.55 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. he saw a big tree .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "33440000 Examples seen. Accuracy: 0.8698 Error: 0.39002 Loss: 0.44997 Threads: 8 Forward time: 3.34s Backward time: 4.16s Step time: 35.18s\n", + "33472000 Examples seen. Accuracy: 0.8505 Error: 0.37320 Loss: 0.44502 Threads: 8 Forward time: 3.13s Backward time: 3.62s Step time: 35.17s\n", + "33504000 Examples seen. Accuracy: 0.8470 Error: 0.37339 Loss: 0.41077 Threads: 8 Forward time: 3.07s Backward time: 3.56s Step time: 34.81s\n", + "Starting Validation.\n", + "Epochs: 349 Examples seen:33504000 Validation Accuracy: 0.8216 Validation Error: 0.4555 Validation Loss: 0.5875 Total time: 695.01min\n", + "Epoch time: 1.7406 minutes. 500 epochs: 14.5050 hours.\n", + "Epochs: 349. Working time: 11.58 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big tr.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "33536000 Examples seen. Accuracy: 0.8447 Error: 0.39037 Loss: 0.43150 Threads: 8 Forward time: 3.39s Backward time: 4.02s Step time: 34.83s\n", + "33568000 Examples seen. Accuracy: 0.8436 Error: 0.42632 Loss: 0.51460 Threads: 8 Forward time: 3.16s Backward time: 3.78s Step time: 35.08s\n", + "33600000 Examples seen. Accuracy: 0.8455 Error: 0.40743 Loss: 0.49173 Threads: 8 Forward time: 3.26s Backward time: 3.90s Step time: 35.46s\n", + "Starting Validation.\n", + "Epochs: 350 Examples seen:33600000 Validation Accuracy: 0.8203 Validation Error: 0.4629 Validation Loss: 0.6058 Total time: 696.82min\n", + "Starting Testing.\n", + "Epochs: 350 Examples seen:33600000 Test Accuracy: 0.8203 Test Error: 0.4629 Test Loss: 0.6058 Total time: 696.85min\n", + "Epoch time: 1.7728 minutes. 500 epochs: 14.7729 hours.\n", + "Epochs: 350. Working time: 11.61 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "33632000 Examples seen. Accuracy: 0.8629 Error: 0.32458 Loss: 0.34396 Threads: 8 Forward time: 3.16s Backward time: 3.70s Step time: 35.31s\n", + "33664000 Examples seen. Accuracy: 0.8612 Error: 0.37561 Loss: 0.43748 Threads: 8 Forward time: 3.20s Backward time: 3.87s Step time: 34.76s\n", + "33696000 Examples seen. 
Accuracy: 0.8514 Error: 0.43488 Loss: 0.56868 Threads: 8 Forward time: 3.22s Backward time: 3.75s Step time: 34.76s\n", + "Starting Validation.\n", + "Epochs: 351 Examples seen:33696000 Validation Accuracy: 0.8207 Validation Error: 0.4649 Validation Loss: 0.6049 Total time: 698.64min\n", + "Epoch time: 1.7379 minutes. 500 epochs: 14.4825 hours.\n", + "Epochs: 351. Working time: 11.64 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big bu.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "33728000 Examples seen. Accuracy: 0.8453 Error: 0.44341 Loss: 0.54759 Threads: 8 Forward time: 3.44s Backward time: 4.18s Step time: 34.88s\n", + "33760000 Examples seen. Accuracy: 0.8465 Error: 0.40905 Loss: 0.51246 Threads: 8 Forward time: 3.35s Backward time: 3.98s Step time: 34.76s\n", + "33792000 Examples seen. 
Accuracy: 0.8465 Error: 0.44788 Loss: 0.54338 Threads: 8 Forward time: 3.06s Backward time: 3.59s Step time: 35.30s\n", + "Starting Validation.\n", + "Epochs: 352 Examples seen:33792000 Validation Accuracy: 0.8157 Validation Error: 0.4686 Validation Loss: 0.5983 Total time: 700.43min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.937 Min Weight: -0.971 Max Output: 0.734 Min Output: -0.749 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.10s Parent:0\n", + "Layer 2 Max Output: 0.734 Min Output: -0.749 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.687 Min Weight: -0.685 Max Output: 4.041 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.16s 0.06s Parent:2\n", + "Layer 4 Max Output: 4.041 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.00s Parent:3\n", + "Layer 5 Max Output: 4.041 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.361 Min Weight: -0.381 Max Output: 4.106 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.33s 0.24s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.235 Min Weight: -0.237 Max Output: 3.165 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.53s 0.37s Parent:6\n", + "Layer 8 Max Output: 3.165 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.137 Min Weight: -0.185 Max Output: 1.828 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.70s 0.34s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.260 Min Weight: -0.206 Max Output: 3.521 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.024 Min Weight: -0.592 Max Output: 16.493 Min Output: -4.194 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.991 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.7649 minutes. 500 epochs: 14.7075 hours.\n", + "Epochs: 352. Working time: 11.67 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. tim was very happy.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "33824000 Examples seen. Accuracy: 0.8440 Error: 0.49305 Loss: 0.62213 Threads: 8 Forward time: 3.47s Backward time: 4.28s Step time: 36.61s\n", + "33856000 Examples seen. Accuracy: 0.8447 Error: 0.39115 Loss: 0.44220 Threads: 8 Forward time: 3.36s Backward time: 3.98s Step time: 35.26s\n", + "33888000 Examples seen. Accuracy: 0.8440 Error: 0.40436 Loss: 0.50023 Threads: 8 Forward time: 3.30s Backward time: 3.86s Step time: 35.72s\n", + "Starting Validation.\n", + "Epochs: 353 Examples seen:33888000 Validation Accuracy: 0.8193 Validation Error: 0.4694 Validation Loss: 0.5953 Total time: 702.27min\n", + "Epoch time: 1.7859 minutes. 500 epochs: 14.8825 hours.\n", + "Epochs: 353. Working time: 11.70 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they wanted to s.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "33920000 Examples seen. Accuracy: 0.8453 Error: 0.39520 Loss: 0.47936 Threads: 8 Forward time: 3.20s Backward time: 3.75s Step time: 35.41s\n", + "33952000 Examples seen. Accuracy: 0.8433 Error: 0.41477 Loss: 0.57704 Threads: 8 Forward time: 3.07s Backward time: 3.71s Step time: 34.81s\n", + "33984000 Examples seen. Accuracy: 0.8585 Error: 0.31742 Loss: 0.32092 Threads: 8 Forward time: 3.25s Backward time: 3.93s Step time: 35.33s\n", + "Starting Validation.\n", + "VALIDATION RECORD! 
Saving NN at autosave.nn\n", + "Epochs: 354 Examples seen:33984000 Validation Accuracy: 0.8228 Validation Error: 0.4543 Validation Loss: 0.6069 Total time: 704.12min\n", + "Epoch time: 1.7667 minutes. 500 epochs: 14.7221 hours.\n", + "Epochs: 354. Working time: 11.74 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big box.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "34016000 Examples seen. Accuracy: 0.8597 Error: 0.45314 Loss: 0.55795 Threads: 8 Forward time: 3.14s Backward time: 3.66s Step time: 35.18s\n", + "34048000 Examples seen. Accuracy: 0.8502 Error: 0.36229 Loss: 0.43691 Threads: 8 Forward time: 3.11s Backward time: 3.65s Step time: 34.93s\n", + "34080000 Examples seen. Accuracy: 0.8464 Error: 0.38646 Loss: 0.47181 Threads: 8 Forward time: 3.22s Backward time: 3.84s Step time: 34.70s\n", + "Starting Validation.\n", + "Epochs: 355 Examples seen:34080000 Validation Accuracy: 0.8193 Validation Error: 0.4600 Validation Loss: 0.5960 Total time: 705.90min\n", + "Epoch time: 1.7350 minutes. 500 epochs: 14.4587 hours.\n", + "Epochs: 355. Working time: 11.77 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy was playing in the park. she saw a big box of t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "34112000 Examples seen. Accuracy: 0.8473 Error: 0.42830 Loss: 0.50052 Threads: 8 Forward time: 3.12s Backward time: 3.68s Step time: 34.76s\n", + "34144000 Examples seen. Accuracy: 0.8444 Error: 0.43695 Loss: 0.54419 Threads: 8 Forward time: 3.71s Backward time: 4.43s Step time: 35.25s\n", + "34176000 Examples seen. 
Accuracy: 0.8413 Error: 0.45847 Loss: 0.53910 Threads: 8 Forward time: 3.21s Backward time: 3.79s Step time: 35.57s\n", + "Starting Validation.\n", + "Epochs: 356 Examples seen:34176000 Validation Accuracy: 0.8207 Validation Error: 0.4681 Validation Loss: 0.5881 Total time: 707.71min\n", + "Epoch time: 1.7784 minutes. 500 epochs: 14.8200 hours.\n", + "Epochs: 356. Working time: 11.80 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named sue went to the park with her mom. she saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "34208000 Examples seen. Accuracy: 0.8621 Error: 0.36694 Loss: 0.39033 Threads: 8 Forward time: 3.29s Backward time: 3.99s Step time: 34.77s\n", + "34240000 Examples seen. Accuracy: 0.8589 Error: 0.43306 Loss: 0.51879 Threads: 8 Forward time: 3.37s Backward time: 4.21s Step time: 35.52s\n", + "34272000 Examples seen. Accuracy: 0.8464 Error: 0.40346 Loss: 0.45076 Threads: 8 Forward time: 3.51s Backward time: 4.19s Step time: 34.83s\n", + "Starting Validation.\n", + "Epochs: 357 Examples seen:34272000 Validation Accuracy: 0.8205 Validation Error: 0.4718 Validation Loss: 0.5936 Total time: 709.50min\n", + "Epoch time: 1.7414 minutes. 500 epochs: 14.5117 hours.\n", + "Epochs: 357. Working time: 11.83 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they walked all .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "34304000 Examples seen. Accuracy: 0.8623 Error: 0.33926 Loss: 0.38400 Threads: 8 Forward time: 3.46s Backward time: 4.13s Step time: 35.03s\n", + "34336000 Examples seen. 
Accuracy: 0.8564 Error: 0.40204 Loss: 0.45653 Threads: 8 Forward time: 3.04s Backward time: 3.51s Step time: 35.32s\n", + "34368000 Examples seen. Accuracy: 0.8475 Error: 0.42312 Loss: 0.50247 Threads: 8 Forward time: 3.20s Backward time: 3.77s Step time: 34.43s\n", + "Starting Validation.\n", + "Epochs: 358 Examples seen:34368000 Validation Accuracy: 0.8189 Validation Error: 0.4642 Validation Loss: 0.6014 Total time: 711.29min\n", + "Epoch time: 1.7214 minutes. 500 epochs: 14.3450 hours.\n", + "Epochs: 358. Working time: 11.85 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big box.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "34400000 Examples seen. Accuracy: 0.8447 Error: 0.42008 Loss: 0.49360 Threads: 8 Forward time: 3.09s Backward time: 3.69s Step time: 34.30s\n", + "34432000 Examples seen. Accuracy: 0.8418 Error: 0.46338 Loss: 0.59605 Threads: 8 Forward time: 3.10s Backward time: 3.72s Step time: 34.97s\n", + "34464000 Examples seen. Accuracy: 0.8537 Error: 0.27089 Loss: 0.23839 Threads: 8 Forward time: 3.06s Backward time: 3.62s Step time: 34.29s\n", + "Starting Validation.\n", + "Epochs: 359 Examples seen:34464000 Validation Accuracy: 0.8203 Validation Error: 0.4626 Validation Loss: 0.6031 Total time: 713.06min\n", + "Epoch time: 1.7147 minutes. 500 epochs: 14.2896 hours.\n", + "Epochs: 359. Working time: 11.88 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little boy named tim was playing in his backyard. he had a big house a.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "34496000 Examples seen. 
Accuracy: 0.8659 Error: 0.43050 Loss: 0.56266 Threads: 8 Forward time: 3.48s Backward time: 4.09s Step time: 34.38s\n", + "34528000 Examples seen. Accuracy: 0.8598 Error: 0.39727 Loss: 0.44026 Threads: 8 Forward time: 3.15s Backward time: 3.80s Step time: 34.86s\n", + "34560000 Examples seen. Accuracy: 0.8513 Error: 0.43967 Loss: 0.52879 Threads: 8 Forward time: 3.23s Backward time: 3.76s Step time: 34.47s\n", + "Starting Validation.\n", + "Epochs: 360 Examples seen:34560000 Validation Accuracy: 0.8187 Validation Error: 0.4713 Validation Loss: 0.6005 Total time: 714.84min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.942 Min Weight: -0.976 Max Output: 0.736 Min Output: -0.751 TNNetPointwiseConv 81,1,32 Times: 0.18s 0.10s Parent:0\n", + "Layer 2 Max Output: 0.736 Min Output: -0.751 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.690 Min Weight: -0.688 Max Output: 4.077 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.15s 0.06s Parent:2\n", + "Layer 4 Max Output: 4.077 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 4.077 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.365 Min Weight: -0.380 Max Output: 4.073 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.43s 0.26s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.235 Min Weight: -0.241 Max Output: 3.314 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.50s 0.38s Parent:6\n", + "Layer 8 Max Output: 3.314 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.139 Min Weight: -0.186 Max Output: 1.864 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.72s 0.37s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.259 Min Weight: -0.206 Max Output: 4.082 Min Output: 0.000 TNNetPointwiseConvReLU 
1,1,128 Times: 0.06s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.028 Min Weight: -0.594 Max Output: 16.661 Min Output: -4.489 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.977 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Starting Testing.\n", + "Epochs: 360 Examples seen:34560000 Test Accuracy: 0.8187 Test Error: 0.4713 Test Loss: 0.6005 Total time: 714.87min\n", + "Epoch time: 1.7234 minutes. 500 epochs: 14.3621 hours.\n", + "Epochs: 360. Working time: 11.91 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy went to the park with her mom. the sky and she .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "34592000 Examples seen. Accuracy: 0.8485 Error: 0.45274 Loss: 0.57201 Threads: 8 Forward time: 3.63s Backward time: 4.09s Step time: 34.49s\n", + "34624000 Examples seen. Accuracy: 0.8677 Error: 0.33218 Loss: 0.38682 Threads: 8 Forward time: 3.26s Backward time: 4.01s Step time: 34.34s\n", + "34656000 Examples seen. Accuracy: 0.8595 Error: 0.42893 Loss: 0.53375 Threads: 8 Forward time: 3.42s Backward time: 4.02s Step time: 34.81s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 361 Examples seen:34656000 Validation Accuracy: 0.8237 Validation Error: 0.4661 Validation Loss: 0.5966 Total time: 716.68min\n", + "Epoch time: 1.7403 minutes. 500 epochs: 14.5021 hours.\n", + "Epochs: 361. Working time: 11.94 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they were so he de.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "34688000 Examples seen. 
Accuracy: 0.8661 Error: 0.39688 Loss: 0.47785 Threads: 8 Forward time: 3.22s Backward time: 3.83s Step time: 34.82s\n", + "34720000 Examples seen. Accuracy: 0.8614 Error: 0.44355 Loss: 0.52716 Threads: 8 Forward time: 3.29s Backward time: 3.94s Step time: 35.02s\n", + "34752000 Examples seen. Accuracy: 0.8499 Error: 0.44324 Loss: 0.57496 Threads: 8 Forward time: 3.07s Backward time: 3.60s Step time: 33.96s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 362 Examples seen:34752000 Validation Accuracy: 0.8266 Validation Error: 0.4621 Validation Loss: 0.6008 Total time: 718.50min\n", + "Epoch time: 1.6978 minutes. 500 epochs: 14.1487 hours.\n", + "Epochs: 362. Working time: 11.97 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom and dad. they wan.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "34784000 Examples seen. Accuracy: 0.8600 Error: 0.31704 Loss: 0.32272 Threads: 8 Forward time: 3.18s Backward time: 3.81s Step time: 34.73s\n", + "34816000 Examples seen. Accuracy: 0.8670 Error: 0.39775 Loss: 0.45815 Threads: 8 Forward time: 3.26s Backward time: 4.05s Step time: 35.14s\n", + "34848000 Examples seen. Accuracy: 0.8578 Error: 0.38990 Loss: 0.43379 Threads: 8 Forward time: 3.10s Backward time: 3.53s Step time: 34.70s\n", + "Starting Validation.\n", + "Epochs: 363 Examples seen:34848000 Validation Accuracy: 0.8193 Validation Error: 0.4582 Validation Loss: 0.5935 Total time: 720.28min\n", + "Epoch time: 1.7348 minutes. 500 epochs: 14.4563 hours.\n", + "Epochs: 363. Working time: 12.00 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little boy named tim went for a walk with his mom. 
they wanted to play.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "34880000 Examples seen. Accuracy: 0.8651 Error: 0.31011 Loss: 0.31128 Threads: 8 Forward time: 2.99s Backward time: 3.54s Step time: 35.01s\n", + "34912000 Examples seen. Accuracy: 0.8761 Error: 0.29479 Loss: 0.31704 Threads: 8 Forward time: 3.16s Backward time: 3.82s Step time: 34.77s\n", + "34944000 Examples seen. Accuracy: 0.8649 Error: 0.38580 Loss: 0.47536 Threads: 8 Forward time: 4.09s Backward time: 4.81s Step time: 36.37s\n", + "Starting Validation.\n", + "Epochs: 364 Examples seen:34944000 Validation Accuracy: 0.8195 Validation Error: 0.4552 Validation Loss: 0.6046 Total time: 722.10min\n", + "Epoch time: 1.8184 minutes. 500 epochs: 15.1533 hours.\n", + "Epochs: 364. Working time: 12.03 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she saw a big sl.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "34976000 Examples seen. Accuracy: 0.8661 Error: 0.33686 Loss: 0.35613 Threads: 8 Forward time: 3.21s Backward time: 3.99s Step time: 36.49s\n", + "35008000 Examples seen. Accuracy: 0.8590 Error: 0.44997 Loss: 0.53492 Threads: 8 Forward time: 3.38s Backward time: 4.15s Step time: 35.86s\n", + "35040000 Examples seen. Accuracy: 0.8767 Error: 0.38837 Loss: 0.46650 Threads: 8 Forward time: 3.08s Backward time: 3.87s Step time: 34.82s\n", + "Starting Validation.\n", + "Epochs: 365 Examples seen:35040000 Validation Accuracy: 0.8193 Validation Error: 0.4463 Validation Loss: 0.6089 Total time: 723.93min\n", + "Epoch time: 1.7410 minutes. 500 epochs: 14.5087 hours.\n", + "Epochs: 365. Working time: 12.07 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they were so excit.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "35072000 Examples seen. Accuracy: 0.8775 Error: 0.34650 Loss: 0.36248 Threads: 8 Forward time: 3.30s Backward time: 4.12s Step time: 35.79s\n", + "35104000 Examples seen. Accuracy: 0.8679 Error: 0.31169 Loss: 0.35146 Threads: 8 Forward time: 3.34s Backward time: 3.96s Step time: 35.55s\n", + "35136000 Examples seen. Accuracy: 0.8549 Error: 0.42018 Loss: 0.48612 Threads: 8 Forward time: 3.41s Backward time: 4.21s Step time: 35.14s\n", + "Starting Validation.\n", + "Epochs: 366 Examples seen:35136000 Validation Accuracy: 0.8170 Validation Error: 0.4655 Validation Loss: 0.6031 Total time: 725.75min\n", + "Epoch time: 1.7570 minutes. 500 epochs: 14.6421 hours.\n", + "Epochs: 366. Working time: 12.10 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they wanted to s.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "35168000 Examples seen. Accuracy: 0.8567 Error: 0.40345 Loss: 0.45643 Threads: 8 Forward time: 3.47s Backward time: 4.17s Step time: 35.62s\n", + "35200000 Examples seen. Accuracy: 0.8523 Error: 0.40927 Loss: 0.55305 Threads: 8 Forward time: 3.40s Backward time: 4.01s Step time: 34.64s\n", + "35232000 Examples seen. Accuracy: 0.8473 Error: 0.40983 Loss: 0.46034 Threads: 8 Forward time: 3.22s Backward time: 3.82s Step time: 35.00s\n", + "Starting Validation.\n", + "Epochs: 367 Examples seen:35232000 Validation Accuracy: 0.8201 Validation Error: 0.4643 Validation Loss: 0.6014 Total time: 727.54min\n", + "Epoch time: 1.7500 minutes. 500 epochs: 14.5837 hours.\n", + "Epochs: 367. 
Working time: 12.13 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she saw a big pa.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "35264000 Examples seen. Accuracy: 0.8641 Error: 0.34452 Loss: 0.40079 Threads: 8 Forward time: 3.40s Backward time: 4.13s Step time: 34.66s\n", + "35296000 Examples seen. Accuracy: 0.8658 Error: 0.30726 Loss: 0.33314 Threads: 8 Forward time: 3.18s Backward time: 3.76s Step time: 35.10s\n", + "35328000 Examples seen. Accuracy: 0.8543 Error: 0.37568 Loss: 0.37457 Threads: 8 Forward time: 3.30s Backward time: 4.05s Step time: 35.07s\n", + "Starting Validation.\n", + "Epochs: 368 Examples seen:35328000 Validation Accuracy: 0.8157 Validation Error: 0.4638 Validation Loss: 0.6098 Total time: 729.33min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.950 Min Weight: -0.986 Max Output: 0.740 Min Output: -0.756 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.10s Parent:0\n", + "Layer 2 Max Output: 0.740 Min Output: -0.756 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.690 Min Weight: -0.689 Max Output: 4.099 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.15s 0.07s Parent:2\n", + "Layer 4 Max Output: 4.099 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.01s Parent:3\n", + "Layer 5 Max Output: 4.099 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.368 Min Weight: -0.379 Max Output: 4.309 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.34s 0.28s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.236 Min Weight: -0.239 Max Output: 3.302 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.60s 0.42s Parent:6\n", + "Layer 8 Max 
Output: 3.302 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.08s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.140 Min Weight: -0.187 Max Output: 1.773 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.70s 0.39s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.259 Min Weight: -0.209 Max Output: 3.741 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.04s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.038 Min Weight: -0.597 Max Output: 16.559 Min Output: -4.166 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.977 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.7533 minutes. 500 epochs: 14.6104 hours.\n", + "Epochs: 368. Working time: 12.16 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. the sky and she .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "35360000 Examples seen. Accuracy: 0.8657 Error: 0.38927 Loss: 0.44843 Threads: 8 Forward time: 3.34s Backward time: 4.09s Step time: 35.27s\n", + "35392000 Examples seen. Accuracy: 0.8635 Error: 0.38307 Loss: 0.38312 Threads: 8 Forward time: 3.16s Backward time: 3.79s Step time: 35.72s\n", + "35424000 Examples seen. Accuracy: 0.8508 Error: 0.41495 Loss: 0.44169 Threads: 8 Forward time: 3.14s Backward time: 3.77s Step time: 35.50s\n", + "Starting Validation.\n", + "Epochs: 369 Examples seen:35424000 Validation Accuracy: 0.8220 Validation Error: 0.4669 Validation Loss: 0.5997 Total time: 731.15min\n", + "Epoch time: 1.7750 minutes. 500 epochs: 14.7917 hours.\n", + "Epochs: 369. Working time: 12.19 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they wanted to go .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "35456000 Examples seen. Accuracy: 0.8474 Error: 0.40621 Loss: 0.52243 Threads: 8 Forward time: 3.16s Backward time: 3.74s Step time: 34.99s\n", + "35488000 Examples seen. Accuracy: 0.8527 Error: 0.29099 Loss: 0.30654 Threads: 8 Forward time: 3.56s Backward time: 4.03s Step time: 35.21s\n", + "35520000 Examples seen. Accuracy: 0.8682 Error: 0.39609 Loss: 0.42872 Threads: 8 Forward time: 3.16s Backward time: 3.76s Step time: 34.80s\n", + "Starting Validation.\n", + "Epochs: 370 Examples seen:35520000 Validation Accuracy: 0.8235 Validation Error: 0.4498 Validation Loss: 0.6045 Total time: 732.95min\n", + "Starting Testing.\n", + "Epochs: 370 Examples seen:35520000 Test Accuracy: 0.8235 Test Error: 0.4498 Test Loss: 0.6045 Total time: 732.98min\n", + "Epoch time: 1.7400 minutes. 500 epochs: 14.4996 hours.\n", + "Epochs: 370. Working time: 12.22 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went for a walk. he saw a big box of things with .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "35552000 Examples seen. Accuracy: 0.8617 Error: 0.36494 Loss: 0.40152 Threads: 8 Forward time: 3.68s Backward time: 4.30s Step time: 35.35s\n", + "35584000 Examples seen. Accuracy: 0.8508 Error: 0.45107 Loss: 0.54167 Threads: 8 Forward time: 3.11s Backward time: 3.70s Step time: 34.66s\n", + "35616000 Examples seen. 
Accuracy: 0.8456 Error: 0.41080 Loss: 0.49304 Threads: 8 Forward time: 3.22s Backward time: 3.85s Step time: 34.28s\n", + "Starting Validation.\n", + "Epochs: 371 Examples seen:35616000 Validation Accuracy: 0.8207 Validation Error: 0.4618 Validation Loss: 0.5956 Total time: 734.76min\n", + "Epoch time: 1.7139 minutes. 500 epochs: 14.2829 hours.\n", + "Epochs: 371. Working time: 12.25 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "35648000 Examples seen. Accuracy: 0.8458 Error: 0.41082 Loss: 0.49455 Threads: 8 Forward time: 3.17s Backward time: 3.72s Step time: 34.59s\n", + "35680000 Examples seen. Accuracy: 0.8429 Error: 0.42253 Loss: 0.49549 Threads: 8 Forward time: 3.06s Backward time: 3.40s Step time: 34.20s\n", + "35712000 Examples seen. Accuracy: 0.8439 Error: 0.39554 Loss: 0.47587 Threads: 8 Forward time: 3.24s Backward time: 3.77s Step time: 34.21s\n", + "Starting Validation.\n", + "Epochs: 372 Examples seen:35712000 Validation Accuracy: 0.8207 Validation Error: 0.4632 Validation Loss: 0.5914 Total time: 736.52min\n", + "Epoch time: 1.7104 minutes. 500 epochs: 14.2533 hours.\n", + "Epochs: 372. Working time: 12.28 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "35744000 Examples seen. Accuracy: 0.8678 Error: 0.31622 Loss: 0.35630 Threads: 8 Forward time: 3.03s Backward time: 3.73s Step time: 34.40s\n", + "35776000 Examples seen. 
Accuracy: 0.8678 Error: 0.39645 Loss: 0.52270 Threads: 8 Forward time: 3.02s Backward time: 3.57s Step time: 34.94s\n", + "35808000 Examples seen. Accuracy: 0.8519 Error: 0.45528 Loss: 0.60021 Threads: 8 Forward time: 3.16s Backward time: 3.75s Step time: 34.40s\n", + "Starting Validation.\n", + "Epochs: 373 Examples seen:35808000 Validation Accuracy: 0.8203 Validation Error: 0.4662 Validation Loss: 0.5976 Total time: 738.29min\n", + "Epoch time: 1.7201 minutes. 500 epochs: 14.3342 hours.\n", + "Epochs: 373. Working time: 12.30 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "35840000 Examples seen. Accuracy: 0.8591 Error: 0.40474 Loss: 0.42992 Threads: 8 Forward time: 3.11s Backward time: 3.67s Step time: 34.63s\n", + "35872000 Examples seen. Accuracy: 0.8594 Error: 0.45798 Loss: 0.59976 Threads: 8 Forward time: 3.67s Backward time: 4.09s Step time: 34.73s\n", + "35904000 Examples seen. Accuracy: 0.8525 Error: 0.37949 Loss: 0.44724 Threads: 8 Forward time: 3.11s Backward time: 3.61s Step time: 34.80s\n", + "Starting Validation.\n", + "Epochs: 374 Examples seen:35904000 Validation Accuracy: 0.8199 Validation Error: 0.4631 Validation Loss: 0.6021 Total time: 740.07min\n", + "Epoch time: 1.7402 minutes. 500 epochs: 14.5017 hours.\n", + "Epochs: 374. Working time: 12.33 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "35936000 Examples seen. 
Accuracy: 0.8461 Error: 0.45707 Loss: 0.55779 Threads: 8 Forward time: 3.20s Backward time: 3.70s Step time: 35.15s\n", + "35968000 Examples seen. Accuracy: 0.8442 Error: 0.41936 Loss: 0.50761 Threads: 8 Forward time: 3.16s Backward time: 3.62s Step time: 33.95s\n", + "36000000 Examples seen. Accuracy: 0.8444 Error: 0.46839 Loss: 0.53900 Threads: 8 Forward time: 3.38s Backward time: 4.04s Step time: 33.85s\n", + "Starting Validation.\n", + "Epochs: 375 Examples seen:36000000 Validation Accuracy: 0.8141 Validation Error: 0.4708 Validation Loss: 0.6044 Total time: 741.83min\n", + "Epoch time: 1.6924 minutes. 500 epochs: 14.1033 hours.\n", + "Epochs: 375. Working time: 12.36 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big tr.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "36032000 Examples seen. Accuracy: 0.8423 Error: 0.41418 Loss: 0.47278 Threads: 8 Forward time: 3.16s Backward time: 3.74s Step time: 34.07s\n", + "36064000 Examples seen. Accuracy: 0.8629 Error: 0.41442 Loss: 0.52733 Threads: 8 Forward time: 2.97s Backward time: 3.51s Step time: 34.24s\n", + "36096000 Examples seen. 
Accuracy: 0.8686 Error: 0.34713 Loss: 0.37848 Threads: 8 Forward time: 3.10s Backward time: 3.74s Step time: 34.55s\n", + "Starting Validation.\n", + "Epochs: 376 Examples seen:36096000 Validation Accuracy: 0.8197 Validation Error: 0.4560 Validation Loss: 0.6038 Total time: 743.59min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.950 Min Weight: -0.985 Max Output: 0.740 Min Output: -0.755 TNNetPointwiseConv 81,1,32 Times: 0.19s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.740 Min Output: -0.755 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.691 Min Weight: -0.694 Max Output: 4.131 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.17s 0.06s Parent:2\n", + "Layer 4 Max Output: 4.131 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 4.131 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.370 Min Weight: -0.379 Max Output: 4.434 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.36s 0.25s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.230 Min Weight: -0.239 Max Output: 3.436 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.53s 0.39s Parent:6\n", + "Layer 8 Max Output: 3.436 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.139 Min Weight: -0.187 Max Output: 2.118 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.72s 0.36s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.260 Min Weight: -0.209 Max Output: 4.258 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.04s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.043 Min Weight: -0.599 Max Output: 18.344 Min Output: -4.755 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.970 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.7275 minutes. 500 epochs: 14.3954 hours.\n", + "Epochs: 376. Working time: 12.39 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl was walking in the park when she saw something special ski.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "36128000 Examples seen. Accuracy: 0.8597 Error: 0.44438 Loss: 0.51008 Threads: 8 Forward time: 3.40s Backward time: 4.22s Step time: 34.96s\n", + "36160000 Examples seen. Accuracy: 0.8510 Error: 0.43623 Loss: 0.50392 Threads: 8 Forward time: 3.31s Backward time: 3.96s Step time: 34.42s\n", + "36192000 Examples seen. Accuracy: 0.8461 Error: 0.47732 Loss: 0.67373 Threads: 8 Forward time: 3.54s Backward time: 4.20s Step time: 34.22s\n", + "Starting Validation.\n", + "Epochs: 377 Examples seen:36192000 Validation Accuracy: 0.8193 Validation Error: 0.4612 Validation Loss: 0.5896 Total time: 745.36min\n", + "Epoch time: 1.7110 minutes. 500 epochs: 14.2583 hours.\n", + "Epochs: 377. Working time: 12.42 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. the sky and she .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "36224000 Examples seen. Accuracy: 0.8461 Error: 0.40670 Loss: 0.47536 Threads: 8 Forward time: 3.10s Backward time: 3.57s Step time: 33.99s\n", + "36256000 Examples seen. Accuracy: 0.8455 Error: 0.37184 Loss: 0.43155 Threads: 8 Forward time: 3.23s Backward time: 3.60s Step time: 34.56s\n", + "36288000 Examples seen. 
Accuracy: 0.8451 Error: 0.41586 Loss: 0.46703 Threads: 8 Forward time: 3.33s Backward time: 3.88s Step time: 34.56s\n", + "Starting Validation.\n", + "Epochs: 378 Examples seen:36288000 Validation Accuracy: 0.8253 Validation Error: 0.4681 Validation Loss: 0.5887 Total time: 747.12min\n", + "Epoch time: 1.7278 minutes. 500 epochs: 14.3987 hours.\n", + "Epochs: 378. Working time: 12.45 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they wanted to s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "36320000 Examples seen. Accuracy: 0.8453 Error: 0.40977 Loss: 0.49589 Threads: 8 Forward time: 3.23s Backward time: 3.76s Step time: 34.31s\n", + "36352000 Examples seen. Accuracy: 0.8442 Error: 0.44829 Loss: 0.56037 Threads: 8 Forward time: 3.07s Backward time: 3.63s Step time: 34.49s\n", + "36384000 Examples seen. Accuracy: 0.8633 Error: 0.33725 Loss: 0.39882 Threads: 8 Forward time: 3.09s Backward time: 3.69s Step time: 34.39s\n", + "Starting Validation.\n", + "Epochs: 379 Examples seen:36384000 Validation Accuracy: 0.8191 Validation Error: 0.4475 Validation Loss: 0.6072 Total time: 748.88min\n", + "Epoch time: 1.7197 minutes. 500 epochs: 14.3312 hours.\n", + "Epochs: 379. Working time: 12.48 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "36416000 Examples seen. Accuracy: 0.8625 Error: 0.43314 Loss: 0.52917 Threads: 8 Forward time: 3.05s Backward time: 3.61s Step time: 34.99s\n", + "36448000 Examples seen. 
Accuracy: 0.8525 Error: 0.33605 Loss: 0.39923 Threads: 8 Forward time: 3.18s Backward time: 3.69s Step time: 34.21s\n", + "36480000 Examples seen. Accuracy: 0.8479 Error: 0.45086 Loss: 0.53536 Threads: 8 Forward time: 3.25s Backward time: 3.80s Step time: 34.40s\n", + "Starting Validation.\n", + "Epochs: 380 Examples seen:36480000 Validation Accuracy: 0.8187 Validation Error: 0.4678 Validation Loss: 0.6021 Total time: 750.65min\n", + "Starting Testing.\n", + "Epochs: 380 Examples seen:36480000 Test Accuracy: 0.8187 Test Error: 0.4678 Test Loss: 0.6021 Total time: 750.69min\n", + "Epoch time: 1.7199 minutes. 500 epochs: 14.3325 hours.\n", + "Epochs: 380. Working time: 12.51 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big st.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "36512000 Examples seen. Accuracy: 0.8462 Error: 0.40257 Loss: 0.47916 Threads: 8 Forward time: 3.32s Backward time: 4.00s Step time: 33.97s\n", + "36544000 Examples seen. Accuracy: 0.8446 Error: 0.36351 Loss: 0.43816 Threads: 8 Forward time: 3.22s Backward time: 3.78s Step time: 34.11s\n", + "36576000 Examples seen. Accuracy: 0.8469 Error: 0.36881 Loss: 0.42927 Threads: 8 Forward time: 3.34s Backward time: 3.80s Step time: 34.91s\n", + "Starting Validation.\n", + "Epochs: 381 Examples seen:36576000 Validation Accuracy: 0.8184 Validation Error: 0.4666 Validation Loss: 0.5907 Total time: 752.45min\n", + "Epoch time: 1.7453 minutes. 500 epochs: 14.5442 hours.\n", + "Epochs: 381. Working time: 12.54 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "36608000 Examples seen. Accuracy: 0.8449 Error: 0.37114 Loss: 0.41132 Threads: 8 Forward time: 3.21s Backward time: 3.72s Step time: 33.94s\n", + "36640000 Examples seen. Accuracy: 0.8524 Error: 0.32622 Loss: 0.37064 Threads: 8 Forward time: 3.37s Backward time: 4.22s Step time: 34.54s\n", + "36672000 Examples seen. Accuracy: 0.8662 Error: 0.39094 Loss: 0.50160 Threads: 8 Forward time: 3.21s Backward time: 3.79s Step time: 34.81s\n", + "Starting Validation.\n", + "Epochs: 382 Examples seen:36672000 Validation Accuracy: 0.8195 Validation Error: 0.4463 Validation Loss: 0.6022 Total time: 754.21min\n", + "Epoch time: 1.7405 minutes. 500 epochs: 14.5038 hours.\n", + "Epochs: 382. Working time: 12.57 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she saw a big po.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "36704000 Examples seen. Accuracy: 0.8586 Error: 0.37184 Loss: 0.42882 Threads: 8 Forward time: 3.35s Backward time: 3.95s Step time: 35.55s\n", + "36736000 Examples seen. Accuracy: 0.8639 Error: 0.29695 Loss: 0.29468 Threads: 8 Forward time: 3.23s Backward time: 3.88s Step time: 34.83s\n", + "36768000 Examples seen. Accuracy: 0.8722 Error: 0.39577 Loss: 0.48493 Threads: 8 Forward time: 2.99s Backward time: 3.53s Step time: 34.55s\n", + "Starting Validation.\n", + "Epochs: 383 Examples seen:36768000 Validation Accuracy: 0.8230 Validation Error: 0.4501 Validation Loss: 0.6031 Total time: 756.00min\n", + "Epoch time: 1.7273 minutes. 500 epochs: 14.3942 hours.\n", + "Epochs: 383. Working time: 12.60 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mommy. 
they saw a big.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "36800000 Examples seen. Accuracy: 0.8602 Error: 0.42605 Loss: 0.49303 Threads: 8 Forward time: 3.27s Backward time: 3.87s Step time: 35.30s\n", + "36832000 Examples seen. Accuracy: 0.8527 Error: 0.41810 Loss: 0.48742 Threads: 8 Forward time: 3.48s Backward time: 3.95s Step time: 34.62s\n", + "36864000 Examples seen. Accuracy: 0.8473 Error: 0.38295 Loss: 0.49049 Threads: 8 Forward time: 3.84s Backward time: 4.56s Step time: 34.05s\n", + "Starting Validation.\n", + "Epochs: 384 Examples seen:36864000 Validation Accuracy: 0.8141 Validation Error: 0.4691 Validation Loss: 0.5886 Total time: 757.78min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.950 Min Weight: -0.989 Max Output: 0.740 Min Output: -0.757 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.10s Parent:0\n", + "Layer 2 Max Output: 0.740 Min Output: -0.757 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.697 Min Weight: -0.691 Max Output: 4.149 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.16s 0.07s Parent:2\n", + "Layer 4 Max Output: 4.149 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.01s Parent:3\n", + "Layer 5 Max Output: 4.149 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.371 Min Weight: -0.378 Max Output: 4.524 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.41s 0.31s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.231 Min Weight: -0.239 Max Output: 3.319 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.69s 0.48s Parent:6\n", + "Layer 8 Max Output: 3.319 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.136 Min Weight: -0.185 Max Output: 2.050 Min Output: 0.000 
TNNetPointwiseConvReLU 1,1,1024 Times: 0.77s 0.44s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.262 Min Weight: -0.209 Max Output: 3.838 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.06s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.043 Min Weight: -0.599 Max Output: 16.052 Min Output: -4.193 TNNetFullConnectLinear 128,1,1 Times: 0.05s 0.02s Parent:10\n", + "Layer 12 Max Output: 0.976 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.7023 minutes. 500 epochs: 14.1858 hours.\n", + "Epochs: 384. Working time: 12.63 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park. she saw a big pile of the sto.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "36896000 Examples seen. Accuracy: 0.8661 Error: 0.36542 Loss: 0.41726 Threads: 8 Forward time: 3.24s Backward time: 3.86s Step time: 35.22s\n", + "36928000 Examples seen. Accuracy: 0.8648 Error: 0.36101 Loss: 0.38173 Threads: 8 Forward time: 3.06s Backward time: 3.55s Step time: 34.37s\n", + "36960000 Examples seen. Accuracy: 0.8585 Error: 0.38771 Loss: 0.52817 Threads: 8 Forward time: 3.28s Backward time: 3.78s Step time: 34.78s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 385 Examples seen:36960000 Validation Accuracy: 0.8268 Validation Error: 0.4550 Validation Loss: 0.5923 Total time: 759.61min\n", + "Epoch time: 1.7392 minutes. 500 epochs: 14.4937 hours.\n", + "Epochs: 385. Working time: 12.66 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park. she saw a big box on the star.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "Max prediction pos is: 81\n", + "36992000 Examples seen. Accuracy: 0.8636 Error: 0.32126 Loss: 0.33103 Threads: 8 Forward time: 3.22s Backward time: 4.05s Step time: 35.86s\n", + "37024000 Examples seen. Accuracy: 0.8707 Error: 0.41323 Loss: 0.53008 Threads: 8 Forward time: 3.43s Backward time: 4.13s Step time: 35.77s\n", + "37056000 Examples seen. Accuracy: 0.8611 Error: 0.41487 Loss: 0.47599 Threads: 8 Forward time: 3.26s Backward time: 3.89s Step time: 35.50s\n", + "Starting Validation.\n", + "Epochs: 386 Examples seen:37056000 Validation Accuracy: 0.8210 Validation Error: 0.4636 Validation Loss: 0.5906 Total time: 761.44min\n", + "Epoch time: 1.7752 minutes. 500 epochs: 14.7933 hours.\n", + "Epochs: 386. Working time: 12.69 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mommy. they saw a big.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "37088000 Examples seen. Accuracy: 0.8655 Error: 0.36003 Loss: 0.39457 Threads: 8 Forward time: 3.29s Backward time: 3.85s Step time: 34.85s\n", + "37120000 Examples seen. Accuracy: 0.8714 Error: 0.33160 Loss: 0.38892 Threads: 8 Forward time: 3.75s Backward time: 4.40s Step time: 34.73s\n", + "37152000 Examples seen. Accuracy: 0.8548 Error: 0.41869 Loss: 0.49990 Threads: 8 Forward time: 3.32s Backward time: 3.83s Step time: 34.21s\n", + "Starting Validation.\n", + "Epochs: 387 Examples seen:37152000 Validation Accuracy: 0.8214 Validation Error: 0.4628 Validation Loss: 0.5855 Total time: 763.21min\n", + "Epoch time: 1.7106 minutes. 500 epochs: 14.2546 hours.\n", + "Epochs: 387. Working time: 12.72 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. 
they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "37184000 Examples seen. Accuracy: 0.8485 Error: 0.40255 Loss: 0.48558 Threads: 8 Forward time: 3.36s Backward time: 4.08s Step time: 34.38s\n", + "37216000 Examples seen. Accuracy: 0.8454 Error: 0.42852 Loss: 0.54951 Threads: 8 Forward time: 3.22s Backward time: 3.72s Step time: 35.55s\n", + "37248000 Examples seen. Accuracy: 0.8449 Error: 0.37070 Loss: 0.41267 Threads: 8 Forward time: 3.19s Backward time: 3.66s Step time: 34.98s\n", + "Starting Validation.\n", + "Epochs: 388 Examples seen:37248000 Validation Accuracy: 0.8235 Validation Error: 0.4677 Validation Loss: 0.5854 Total time: 765.00min\n", + "Epoch time: 1.7492 minutes. 500 epochs: 14.5767 hours.\n", + "Epochs: 388. Working time: 12.75 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park. she saw a big bow on her boun.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "37280000 Examples seen. Accuracy: 0.8479 Error: 0.40466 Loss: 0.47744 Threads: 8 Forward time: 3.32s Backward time: 3.88s Step time: 35.41s\n", + "37312000 Examples seen. Accuracy: 0.8562 Error: 0.28691 Loss: 0.31056 Threads: 8 Forward time: 3.24s Backward time: 3.97s Step time: 35.00s\n", + "37344000 Examples seen. Accuracy: 0.8742 Error: 0.36593 Loss: 0.35946 Threads: 8 Forward time: 3.32s Backward time: 3.95s Step time: 34.93s\n", + "Starting Validation.\n", + "Epochs: 389 Examples seen:37344000 Validation Accuracy: 0.8201 Validation Error: 0.4384 Validation Loss: 0.6067 Total time: 766.81min\n", + "Epoch time: 1.7466 minutes. 500 epochs: 14.5554 hours.\n", + "Epochs: 389. Working time: 12.78 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. tim saw a big tree.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "37376000 Examples seen. Accuracy: 0.8738 Error: 0.38948 Loss: 0.44452 Threads: 8 Forward time: 3.22s Backward time: 3.78s Step time: 35.03s\n", + "37408000 Examples seen. Accuracy: 0.8600 Error: 0.37682 Loss: 0.40193 Threads: 8 Forward time: 3.26s Backward time: 3.88s Step time: 34.72s\n", + "37440000 Examples seen. Accuracy: 0.8501 Error: 0.40368 Loss: 0.46213 Threads: 8 Forward time: 3.33s Backward time: 3.94s Step time: 34.66s\n", + "Starting Validation.\n", + "Epochs: 390 Examples seen:37440000 Validation Accuracy: 0.8201 Validation Error: 0.4681 Validation Loss: 0.5946 Total time: 768.59min\n", + "Starting Testing.\n", + "Epochs: 390 Examples seen:37440000 Test Accuracy: 0.8201 Test Error: 0.4681 Test Loss: 0.5946 Total time: 768.63min\n", + "Epoch time: 1.7331 minutes. 500 epochs: 14.4425 hours.\n", + "Epochs: 390. Working time: 12.81 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "37472000 Examples seen. Accuracy: 0.8476 Error: 0.41451 Loss: 0.48105 Threads: 8 Forward time: 3.39s Backward time: 4.04s Step time: 35.42s\n", + "37504000 Examples seen. Accuracy: 0.8458 Error: 0.38943 Loss: 0.42075 Threads: 8 Forward time: 3.47s Backward time: 4.14s Step time: 35.52s\n", + "37536000 Examples seen. 
Accuracy: 0.8455 Error: 0.39980 Loss: 0.43861 Threads: 8 Forward time: 3.20s Backward time: 3.78s Step time: 35.06s\n", + "Starting Validation.\n", + "Epochs: 391 Examples seen:37536000 Validation Accuracy: 0.8218 Validation Error: 0.4625 Validation Loss: 0.5843 Total time: 770.44min\n", + "Epoch time: 1.7531 minutes. 500 epochs: 14.6092 hours.\n", + "Epochs: 391. Working time: 12.84 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big tr.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "37568000 Examples seen. Accuracy: 0.8450 Error: 0.41888 Loss: 0.47110 Threads: 8 Forward time: 3.15s Backward time: 3.55s Step time: 34.39s\n", + "37600000 Examples seen. Accuracy: 0.8420 Error: 0.41952 Loss: 0.52753 Threads: 8 Forward time: 3.18s Backward time: 3.80s Step time: 34.13s\n", + "37632000 Examples seen. 
Accuracy: 0.8444 Error: 0.44648 Loss: 0.58015 Threads: 8 Forward time: 3.38s Backward time: 4.03s Step time: 34.28s\n", + "Starting Validation.\n", + "Epochs: 392 Examples seen:37632000 Validation Accuracy: 0.8207 Validation Error: 0.4576 Validation Loss: 0.5877 Total time: 772.20min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.951 Min Weight: -0.991 Max Output: 0.740 Min Output: -0.758 TNNetPointwiseConv 81,1,32 Times: 0.21s 0.10s Parent:0\n", + "Layer 2 Max Output: 0.740 Min Output: -0.758 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.699 Min Weight: -0.697 Max Output: 4.182 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.14s 0.08s Parent:2\n", + "Layer 4 Max Output: 4.182 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.00s Parent:3\n", + "Layer 5 Max Output: 4.182 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.371 Min Weight: -0.380 Max Output: 4.310 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.49s 0.28s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.234 Min Weight: -0.240 Max Output: 3.178 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.57s 0.41s Parent:6\n", + "Layer 8 Max Output: 3.178 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.00s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.141 Min Weight: -0.188 Max Output: 1.926 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.77s 0.39s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.261 Min Weight: -0.208 Max Output: 3.716 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.047 Min Weight: -0.601 Max Output: 16.221 Min Output: -4.196 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.02s Parent:10\n", + "Layer 12 Max Output: 0.930 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.7140 minutes. 500 epochs: 14.2837 hours.\n", + "Epochs: 392. Working time: 12.87 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they were best f.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "37664000 Examples seen. Accuracy: 0.8442 Error: 0.44781 Loss: 0.54706 Threads: 8 Forward time: 3.25s Backward time: 3.90s Step time: 33.62s\n", + "37696000 Examples seen. Accuracy: 0.8431 Error: 0.39642 Loss: 0.45282 Threads: 8 Forward time: 3.07s Backward time: 3.51s Step time: 34.04s\n", + "37728000 Examples seen. Accuracy: 0.8423 Error: 0.41207 Loss: 0.53269 Threads: 8 Forward time: 3.27s Backward time: 3.80s Step time: 33.85s\n", + "Starting Validation.\n", + "Epochs: 393 Examples seen:37728000 Validation Accuracy: 0.8239 Validation Error: 0.4695 Validation Loss: 0.5837 Total time: 773.93min\n", + "Epoch time: 1.6925 minutes. 500 epochs: 14.1038 hours.\n", + "Epochs: 393. Working time: 12.90 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they wanted to b.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "37760000 Examples seen. Accuracy: 0.8411 Error: 0.46860 Loss: 0.57831 Threads: 8 Forward time: 3.22s Backward time: 3.70s Step time: 34.30s\n", + "37792000 Examples seen. Accuracy: 0.8645 Error: 0.35459 Loss: 0.37601 Threads: 8 Forward time: 3.13s Backward time: 3.73s Step time: 34.63s\n", + "37824000 Examples seen. 
Accuracy: 0.8692 Error: 0.39798 Loss: 0.47412 Threads: 8 Forward time: 3.10s Backward time: 3.76s Step time: 34.98s\n", + "Starting Validation.\n", + "Epochs: 394 Examples seen:37824000 Validation Accuracy: 0.8218 Validation Error: 0.4448 Validation Loss: 0.5994 Total time: 775.71min\n", + "Epoch time: 1.7489 minutes. 500 epochs: 14.5742 hours.\n", + "Epochs: 394. Working time: 12.93 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big bo.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "37856000 Examples seen. Accuracy: 0.8603 Error: 0.40789 Loss: 0.46725 Threads: 8 Forward time: 3.17s Backward time: 3.73s Step time: 34.70s\n", + "37888000 Examples seen. Accuracy: 0.8523 Error: 0.37971 Loss: 0.46720 Threads: 8 Forward time: 3.27s Backward time: 3.91s Step time: 34.01s\n", + "37920000 Examples seen. Accuracy: 0.8540 Error: 0.37052 Loss: 0.39119 Threads: 8 Forward time: 3.34s Backward time: 4.04s Step time: 34.72s\n", + "Starting Validation.\n", + "Epochs: 395 Examples seen:37920000 Validation Accuracy: 0.8201 Validation Error: 0.4635 Validation Loss: 0.5882 Total time: 777.47min\n", + "Epoch time: 1.7359 minutes. 500 epochs: 14.4658 hours.\n", + "Epochs: 395. Working time: 12.96 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they were she sa.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "37952000 Examples seen. Accuracy: 0.8741 Error: 0.32237 Loss: 0.32175 Threads: 8 Forward time: 3.37s Backward time: 4.04s Step time: 34.40s\n", + "37984000 Examples seen. 
Accuracy: 0.8677 Error: 0.36800 Loss: 0.47012 Threads: 8 Forward time: 3.15s Backward time: 3.81s Step time: 34.41s\n", + "38016000 Examples seen. Accuracy: 0.8636 Error: 0.36265 Loss: 0.41812 Threads: 8 Forward time: 3.81s Backward time: 4.52s Step time: 34.48s\n", + "Starting Validation.\n", + "Epochs: 396 Examples seen:38016000 Validation Accuracy: 0.8237 Validation Error: 0.4700 Validation Loss: 0.5847 Total time: 779.24min\n", + "Epoch time: 1.7242 minutes. 500 epochs: 14.3683 hours.\n", + "Epochs: 396. Working time: 12.99 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim was walking in the park. he saw a big back with h.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "38048000 Examples seen. Accuracy: 0.8516 Error: 0.40419 Loss: 0.43650 Threads: 8 Forward time: 3.19s Backward time: 3.71s Step time: 34.31s\n", + "38080000 Examples seen. Accuracy: 0.8485 Error: 0.38665 Loss: 0.41805 Threads: 8 Forward time: 3.10s Backward time: 3.67s Step time: 34.38s\n", + "38112000 Examples seen. Accuracy: 0.8492 Error: 0.43494 Loss: 0.52444 Threads: 8 Forward time: 3.38s Backward time: 3.97s Step time: 34.23s\n", + "Starting Validation.\n", + "Epochs: 397 Examples seen:38112000 Validation Accuracy: 0.8168 Validation Error: 0.4698 Validation Loss: 0.6045 Total time: 781.00min\n", + "Epoch time: 1.7114 minutes. 500 epochs: 14.2621 hours.\n", + "Epochs: 397. Working time: 13.02 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom and dad. she saw .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "38144000 Examples seen. 
Accuracy: 0.8513 Error: 0.39539 Loss: 0.49586 Threads: 8 Forward time: 3.32s Backward time: 3.92s Step time: 34.77s\n", + "38176000 Examples seen. Accuracy: 0.8485 Error: 0.44406 Loss: 0.57556 Threads: 8 Forward time: 3.64s Backward time: 4.18s Step time: 34.69s\n", + "38208000 Examples seen. Accuracy: 0.8454 Error: 0.38671 Loss: 0.47706 Threads: 8 Forward time: 3.23s Backward time: 3.70s Step time: 34.55s\n", + "Starting Validation.\n", + "Epochs: 398 Examples seen:38208000 Validation Accuracy: 0.8260 Validation Error: 0.4555 Validation Loss: 0.5818 Total time: 782.77min\n", + "Epoch time: 1.7274 minutes. 500 epochs: 14.3946 hours.\n", + "Epochs: 398. Working time: 13.05 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "38240000 Examples seen. Accuracy: 0.8438 Error: 0.47605 Loss: 0.59144 Threads: 8 Forward time: 3.47s Backward time: 3.91s Step time: 35.66s\n", + "38272000 Examples seen. Accuracy: 0.8451 Error: 0.41713 Loss: 0.46288 Threads: 8 Forward time: 3.15s Backward time: 3.60s Step time: 33.92s\n", + "38304000 Examples seen. Accuracy: 0.8437 Error: 0.37889 Loss: 0.42557 Threads: 8 Forward time: 3.03s Backward time: 3.36s Step time: 34.09s\n", + "Starting Validation.\n", + "Epochs: 399 Examples seen:38304000 Validation Accuracy: 0.8251 Validation Error: 0.4743 Validation Loss: 0.5857 Total time: 784.55min\n", + "Epoch time: 1.7046 minutes. 500 epochs: 14.2054 hours.\n", + "Epochs: 399. Working time: 13.08 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they were the bi.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "38336000 Examples seen. Accuracy: 0.8448 Error: 0.35134 Loss: 0.39737 Threads: 8 Forward time: 3.35s Backward time: 4.01s Step time: 33.93s\n", + "38368000 Examples seen. Accuracy: 0.8468 Error: 0.41045 Loss: 0.47346 Threads: 8 Forward time: 3.00s Backward time: 3.43s Step time: 33.77s\n", + "38400000 Examples seen. Accuracy: 0.8444 Error: 0.47768 Loss: 0.53178 Threads: 8 Forward time: 3.27s Backward time: 3.86s Step time: 34.03s\n", + "Starting Validation.\n", + "Epochs: 400 Examples seen:38400000 Validation Accuracy: 0.8268 Validation Error: 0.4588 Validation Loss: 0.5787 Total time: 786.29min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.956 Min Weight: -0.990 Max Output: 0.742 Min Output: -0.758 TNNetPointwiseConv 81,1,32 Times: 0.21s 0.08s Parent:0\n", + "Layer 2 Max Output: 0.742 Min Output: -0.758 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.703 Min Weight: -0.698 Max Output: 4.227 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.13s 0.07s Parent:2\n", + "Layer 4 Max Output: 4.227 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 4.227 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.374 Min Weight: -0.384 Max Output: 4.496 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.40s 0.27s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.233 Min Weight: -0.241 Max Output: 3.270 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.59s 0.40s Parent:6\n", + "Layer 8 Max Output: 3.270 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.08s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.140 Min Weight: -0.188 Max Output: 2.069 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.73s 0.35s Parent:8\n", + "Layer 10 Neurons:128 Max 
Weight: 0.260 Min Weight: -0.209 Max Output: 3.970 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.06s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.051 Min Weight: -0.602 Max Output: 17.587 Min Output: -4.384 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.990 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Starting Testing.\n", + "Epochs: 400 Examples seen:38400000 Test Accuracy: 0.8268 Test Error: 0.4588 Test Loss: 0.5787 Total time: 786.32min\n", + "Epoch time: 1.7014 minutes. 500 epochs: 14.1787 hours.\n", + "Epochs: 400. Working time: 13.11 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "38432000 Examples seen. Accuracy: 0.8442 Error: 0.42422 Loss: 0.58454 Threads: 8 Forward time: 3.04s Backward time: 3.43s Step time: 34.00s\n", + "38464000 Examples seen. Accuracy: 0.8464 Error: 0.37253 Loss: 0.43761 Threads: 8 Forward time: 3.40s Backward time: 4.00s Step time: 34.45s\n", + "38496000 Examples seen. Accuracy: 0.8527 Error: 0.43896 Loss: 0.51319 Threads: 8 Forward time: 3.46s Backward time: 3.88s Step time: 33.84s\n", + "Starting Validation.\n", + "Epochs: 401 Examples seen:38496000 Validation Accuracy: 0.8218 Validation Error: 0.4684 Validation Loss: 0.5802 Total time: 788.07min\n", + "Epoch time: 1.6919 minutes. 500 epochs: 14.0992 hours.\n", + "Epochs: 401. Working time: 13.13 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy was playing in her room. she had a big box of h.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "38528000 Examples seen. Accuracy: 0.8466 Error: 0.43720 Loss: 0.45611 Threads: 8 Forward time: 3.14s Backward time: 3.57s Step time: 34.33s\n", + "38560000 Examples seen. Accuracy: 0.8697 Error: 0.30194 Loss: 0.30793 Threads: 8 Forward time: 3.21s Backward time: 3.96s Step time: 35.25s\n", + "38592000 Examples seen. Accuracy: 0.8667 Error: 0.35673 Loss: 0.47993 Threads: 8 Forward time: 3.20s Backward time: 3.79s Step time: 35.50s\n", + "Starting Validation.\n", + "Epochs: 402 Examples seen:38592000 Validation Accuracy: 0.8253 Validation Error: 0.4601 Validation Loss: 0.5881 Total time: 789.87min\n", + "Epoch time: 1.7748 minutes. 500 epochs: 14.7900 hours.\n", + "Epochs: 402. Working time: 13.16 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lucy was playing in her backyard when she saw a big .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "38624000 Examples seen. Accuracy: 0.8554 Error: 0.36036 Loss: 0.39114 Threads: 8 Forward time: 3.16s Backward time: 3.79s Step time: 35.38s\n", + "38656000 Examples seen. Accuracy: 0.8552 Error: 0.39724 Loss: 0.45123 Threads: 8 Forward time: 3.31s Backward time: 4.07s Step time: 33.84s\n", + "38688000 Examples seen. Accuracy: 0.8504 Error: 0.38069 Loss: 0.43253 Threads: 8 Forward time: 3.24s Backward time: 3.75s Step time: 35.34s\n", + "Starting Validation.\n", + "Epochs: 403 Examples seen:38688000 Validation Accuracy: 0.8226 Validation Error: 0.4613 Validation Loss: 0.5891 Total time: 791.66min\n", + "Epoch time: 1.7669 minutes. 500 epochs: 14.7238 hours.\n", + "Epochs: 403. Working time: 13.19 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy went to the park with her mom. 
they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "38720000 Examples seen. Accuracy: 0.8549 Error: 0.33201 Loss: 0.45638 Threads: 8 Forward time: 3.24s Backward time: 3.81s Step time: 34.92s\n", + "38752000 Examples seen. Accuracy: 0.8485 Error: 0.38874 Loss: 0.46920 Threads: 8 Forward time: 3.12s Backward time: 3.53s Step time: 34.36s\n", + "38784000 Examples seen. Accuracy: 0.8448 Error: 0.48562 Loss: 0.56498 Threads: 8 Forward time: 3.17s Backward time: 3.75s Step time: 33.62s\n", + "Starting Validation.\n", + "Epochs: 404 Examples seen:38784000 Validation Accuracy: 0.8260 Validation Error: 0.4599 Validation Loss: 0.5803 Total time: 793.42min\n", + "Epoch time: 1.6812 minutes. 500 epochs: 14.0096 hours.\n", + "Epochs: 404. Working time: 13.22 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named lily went to the park with her mom. they wanted to g.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "38816000 Examples seen. Accuracy: 0.8477 Error: 0.43475 Loss: 0.52565 Threads: 8 Forward time: 3.47s Backward time: 4.19s Step time: 34.31s\n", + "38848000 Examples seen. Accuracy: 0.8622 Error: 0.28579 Loss: 0.29999 Threads: 8 Forward time: 3.24s Backward time: 3.80s Step time: 35.65s\n", + "38880000 Examples seen. Accuracy: 0.8687 Error: 0.36162 Loss: 0.38573 Threads: 8 Forward time: 3.20s Backward time: 3.82s Step time: 34.80s\n", + "Starting Validation.\n", + "Epochs: 405 Examples seen:38880000 Validation Accuracy: 0.8258 Validation Error: 0.4464 Validation Loss: 0.5845 Total time: 795.20min\n", + "Epoch time: 1.7402 minutes. 500 epochs: 14.5017 hours.\n", + "Epochs: 405. Working time: 13.25 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "38912000 Examples seen. Accuracy: 0.8622 Error: 0.33583 Loss: 0.37757 Threads: 8 Forward time: 3.31s Backward time: 3.84s Step time: 33.98s\n", + "38944000 Examples seen. Accuracy: 0.8559 Error: 0.33735 Loss: 0.37878 Threads: 8 Forward time: 3.03s Backward time: 3.44s Step time: 34.10s\n", + "38976000 Examples seen. Accuracy: 0.8493 Error: 0.44186 Loss: 0.52248 Threads: 8 Forward time: 3.23s Backward time: 3.78s Step time: 34.13s\n", + "Starting Validation.\n", + "Epochs: 406 Examples seen:38976000 Validation Accuracy: 0.8205 Validation Error: 0.4634 Validation Loss: 0.5860 Total time: 796.95min\n", + "Epoch time: 1.7063 minutes. 500 epochs: 14.2196 hours.\n", + "Epochs: 406. Working time: 13.28 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they wanted to s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "39008000 Examples seen. Accuracy: 0.8462 Error: 0.44940 Loss: 0.53544 Threads: 8 Forward time: 3.48s Backward time: 4.13s Step time: 34.20s\n", + "39040000 Examples seen. Accuracy: 0.8470 Error: 0.41380 Loss: 0.48783 Threads: 8 Forward time: 3.53s Backward time: 4.13s Step time: 33.72s\n", + "39072000 Examples seen. Accuracy: 0.8481 Error: 0.36046 Loss: 0.43582 Threads: 8 Forward time: 3.26s Backward time: 3.66s Step time: 34.65s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 407 Examples seen:39072000 Validation Accuracy: 0.8274 Validation Error: 0.4573 Validation Loss: 0.5754 Total time: 798.75min\n", + "Epoch time: 1.7323 minutes. 
500 epochs: 14.4358 hours.\n", + "Epochs: 407. Working time: 13.31 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "39104000 Examples seen. Accuracy: 0.8482 Error: 0.38896 Loss: 0.47473 Threads: 8 Forward time: 3.24s Backward time: 3.80s Step time: 34.68s\n", + "39136000 Examples seen. Accuracy: 0.8582 Error: 0.39442 Loss: 0.49491 Threads: 8 Forward time: 3.27s Backward time: 3.79s Step time: 34.40s\n", + "39168000 Examples seen. Accuracy: 0.8512 Error: 0.37313 Loss: 0.44723 Threads: 8 Forward time: 3.07s Backward time: 3.72s Step time: 34.74s\n", + "Starting Validation.\n", + "Epochs: 408 Examples seen:39168000 Validation Accuracy: 0.8237 Validation Error: 0.4598 Validation Loss: 0.5866 Total time: 800.52min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.952 Min Weight: -0.991 Max Output: 0.741 Min Output: -0.758 TNNetPointwiseConv 81,1,32 Times: 0.19s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.741 Min Output: -0.758 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.701 Min Weight: -0.697 Max Output: 4.267 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.16s 0.07s Parent:2\n", + "Layer 4 Max Output: 4.267 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.01s Parent:3\n", + "Layer 5 Max Output: 4.267 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.374 Min Weight: -0.386 Max Output: 4.556 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.36s 0.25s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.238 Min Weight: -0.243 Max Output: 3.114 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 
Times: 1.48s 0.37s Parent:6\n", + "Layer 8 Max Output: 3.114 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.138 Min Weight: -0.193 Max Output: 2.022 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.66s 0.36s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.261 Min Weight: -0.209 Max Output: 3.954 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.06s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.053 Min Weight: -0.604 Max Output: 16.345 Min Output: -4.121 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.876 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.7370 minutes. 500 epochs: 14.4750 hours.\n", + "Epochs: 408. Working time: 13.34 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "39200000 Examples seen. Accuracy: 0.8478 Error: 0.39098 Loss: 0.40814 Threads: 8 Forward time: 3.17s Backward time: 3.62s Step time: 34.22s\n", + "39232000 Examples seen. Accuracy: 0.8696 Error: 0.32274 Loss: 0.35051 Threads: 8 Forward time: 3.06s Backward time: 3.73s Step time: 34.84s\n", + "39264000 Examples seen. Accuracy: 0.8617 Error: 0.41091 Loss: 0.47500 Threads: 8 Forward time: 3.26s Backward time: 3.85s Step time: 35.34s\n", + "Starting Validation.\n", + "Epochs: 409 Examples seen:39264000 Validation Accuracy: 0.8199 Validation Error: 0.4651 Validation Loss: 0.5913 Total time: 802.31min\n", + "Epoch time: 1.7672 minutes. 500 epochs: 14.7271 hours.\n", + "Epochs: 409. Working time: 13.37 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they wanted to fin.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "39296000 Examples seen. Accuracy: 0.8527 Error: 0.36362 Loss: 0.45566 Threads: 8 Forward time: 3.41s Backward time: 4.03s Step time: 34.73s\n", + "39328000 Examples seen. Accuracy: 0.8626 Error: 0.35203 Loss: 0.38798 Threads: 8 Forward time: 3.54s Backward time: 4.30s Step time: 34.49s\n", + "39360000 Examples seen. Accuracy: 0.8517 Error: 0.34148 Loss: 0.40532 Threads: 8 Forward time: 3.32s Backward time: 3.85s Step time: 35.02s\n", + "Starting Validation.\n", + "Epochs: 410 Examples seen:39360000 Validation Accuracy: 0.8243 Validation Error: 0.4570 Validation Loss: 0.5750 Total time: 804.09min\n", + "Starting Testing.\n", + "Epochs: 410 Examples seen:39360000 Test Accuracy: 0.8243 Test Error: 0.4570 Test Loss: 0.5750 Total time: 804.12min\n", + "Epoch time: 1.7512 minutes. 500 epochs: 14.5929 hours.\n", + "Epochs: 410. Working time: 13.40 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "39392000 Examples seen. Accuracy: 0.8535 Error: 0.34265 Loss: 0.41321 Threads: 8 Forward time: 3.25s Backward time: 3.70s Step time: 34.94s\n", + "39424000 Examples seen. Accuracy: 0.8700 Error: 0.30192 Loss: 0.36703 Threads: 8 Forward time: 3.61s Backward time: 4.06s Step time: 35.04s\n", + "39456000 Examples seen. 
Accuracy: 0.8684 Error: 0.32238 Loss: 0.33836 Threads: 8 Forward time: 3.40s Backward time: 4.11s Step time: 34.67s\n", + "Starting Validation.\n", + "Epochs: 411 Examples seen:39456000 Validation Accuracy: 0.8258 Validation Error: 0.4453 Validation Loss: 0.5781 Total time: 805.91min\n", + "Epoch time: 1.7336 minutes. 500 epochs: 14.4467 hours.\n", + "Epochs: 411. Working time: 13.43 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named sue went to the park with her mom. she saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "39488000 Examples seen. Accuracy: 0.8719 Error: 0.40727 Loss: 0.48405 Threads: 8 Forward time: 3.40s Backward time: 4.02s Step time: 35.25s\n", + "39520000 Examples seen. Accuracy: 0.8808 Error: 0.32222 Loss: 0.34347 Threads: 8 Forward time: 3.12s Backward time: 3.67s Step time: 35.13s\n", + "39552000 Examples seen. Accuracy: 0.8803 Error: 0.35797 Loss: 0.43179 Threads: 8 Forward time: 3.17s Backward time: 3.71s Step time: 35.25s\n", + "Starting Validation.\n", + "Epochs: 412 Examples seen:39552000 Validation Accuracy: 0.8191 Validation Error: 0.4554 Validation Loss: 0.5842 Total time: 807.71min\n", + "Epoch time: 1.7623 minutes. 500 epochs: 14.6858 hours.\n", + "Epochs: 412. Working time: 13.46 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named sue went to the park with her mom. she saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "39584000 Examples seen. Accuracy: 0.8589 Error: 0.37967 Loss: 0.45670 Threads: 8 Forward time: 3.22s Backward time: 3.77s Step time: 34.44s\n", + "39616000 Examples seen. 
Accuracy: 0.8524 Error: 0.38565 Loss: 0.43655 Threads: 8 Forward time: 3.30s Backward time: 3.92s Step time: 34.66s\n", + "39648000 Examples seen. Accuracy: 0.8506 Error: 0.41682 Loss: 0.45484 Threads: 8 Forward time: 3.27s Backward time: 3.91s Step time: 34.56s\n", + "Starting Validation.\n", + "Epochs: 413 Examples seen:39648000 Validation Accuracy: 0.8255 Validation Error: 0.4619 Validation Loss: 0.5744 Total time: 809.49min\n", + "Epoch time: 1.7280 minutes. 500 epochs: 14.4000 hours.\n", + "Epochs: 413. Working time: 13.49 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big box.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "39680000 Examples seen. Accuracy: 0.8504 Error: 0.38240 Loss: 0.45149 Threads: 8 Forward time: 3.24s Backward time: 3.72s Step time: 34.72s\n", + "39712000 Examples seen. Accuracy: 0.8485 Error: 0.42894 Loss: 0.51866 Threads: 8 Forward time: 3.18s Backward time: 3.63s Step time: 33.85s\n", + "39744000 Examples seen. Accuracy: 0.8497 Error: 0.34327 Loss: 0.38858 Threads: 8 Forward time: 3.30s Backward time: 3.90s Step time: 34.44s\n", + "Starting Validation.\n", + "Epochs: 414 Examples seen:39744000 Validation Accuracy: 0.8268 Validation Error: 0.4600 Validation Loss: 0.5715 Total time: 811.25min\n", + "Epoch time: 1.7220 minutes. 500 epochs: 14.3500 hours.\n", + "Epochs: 414. Working time: 13.52 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw many to.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "39776000 Examples seen. 
Accuracy: 0.8706 Error: 0.35634 Loss: 0.37607 Threads: 8 Forward time: 3.22s Backward time: 3.86s Step time: 34.60s\n", + "39808000 Examples seen. Accuracy: 0.8700 Error: 0.35223 Loss: 0.38127 Threads: 8 Forward time: 3.00s Backward time: 3.46s Step time: 34.86s\n", + "39840000 Examples seen. Accuracy: 0.8617 Error: 0.40918 Loss: 0.52948 Threads: 8 Forward time: 3.02s Backward time: 3.45s Step time: 33.98s\n", + "Starting Validation.\n", + "Epochs: 415 Examples seen:39840000 Validation Accuracy: 0.8247 Validation Error: 0.4536 Validation Loss: 0.5731 Total time: 813.01min\n", + "Epoch time: 1.6991 minutes. 500 epochs: 14.1588 hours.\n", + "Epochs: 415. Working time: 13.55 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named lucy went to the park with her mom. the sky and she .\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "39872000 Examples seen. Accuracy: 0.8513 Error: 0.39064 Loss: 0.50694 Threads: 8 Forward time: 3.21s Backward time: 3.75s Step time: 33.97s\n", + "39904000 Examples seen. Accuracy: 0.8677 Error: 0.39132 Loss: 0.45229 Threads: 8 Forward time: 3.23s Backward time: 3.86s Step time: 34.68s\n", + "39936000 Examples seen. 
Accuracy: 0.8535 Error: 0.45328 Loss: 0.56547 Threads: 8 Forward time: 3.26s Backward time: 3.64s Step time: 34.07s\n", + "Starting Validation.\n", + "Epochs: 416 Examples seen:39936000 Validation Accuracy: 0.8220 Validation Error: 0.4609 Validation Loss: 0.5692 Total time: 814.77min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.958 Min Weight: -0.995 Max Output: 0.744 Min Output: -0.759 TNNetPointwiseConv 81,1,32 Times: 0.18s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.744 Min Output: -0.759 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.708 Min Weight: -0.701 Max Output: 4.258 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.16s 0.06s Parent:2\n", + "Layer 4 Max Output: 4.258 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.00s Parent:3\n", + "Layer 5 Max Output: 4.258 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.378 Min Weight: -0.384 Max Output: 4.648 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.42s 0.26s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.238 Min Weight: -0.241 Max Output: 3.381 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.49s 0.39s Parent:6\n", + "Layer 8 Max Output: 3.381 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.06s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.141 Min Weight: -0.194 Max Output: 1.983 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.70s 0.34s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.262 Min Weight: -0.210 Max Output: 3.860 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.04s 0.03s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.060 Min Weight: -0.607 Max Output: 16.580 Min Output: -4.314 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.978 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.7035 minutes. 500 epochs: 14.1962 hours.\n", + "Epochs: 416. Working time: 13.58 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily was playing in her backyard. she saw a big blue.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "39968000 Examples seen. Accuracy: 0.8571 Error: 0.34901 Loss: 0.36204 Threads: 8 Forward time: 3.20s Backward time: 3.85s Step time: 35.17s\n", + "40000000 Examples seen. Accuracy: 0.8744 Error: 0.35319 Loss: 0.38628 Threads: 8 Forward time: 3.07s Backward time: 3.69s Step time: 34.49s\n", + "40032000 Examples seen. Accuracy: 0.8671 Error: 0.41574 Loss: 0.46651 Threads: 8 Forward time: 3.03s Backward time: 3.63s Step time: 34.75s\n", + "Starting Validation.\n", + "Epochs: 417 Examples seen:40032000 Validation Accuracy: 0.8212 Validation Error: 0.4502 Validation Loss: 0.5714 Total time: 816.55min\n", + "Epoch time: 1.7376 minutes. 500 epochs: 14.4800 hours.\n", + "Epochs: 417. Working time: 13.61 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "40064000 Examples seen. Accuracy: 0.8591 Error: 0.42976 Loss: 0.57538 Threads: 8 Forward time: 3.75s Backward time: 4.28s Step time: 34.53s\n", + "40096000 Examples seen. Accuracy: 0.8724 Error: 0.34907 Loss: 0.40589 Threads: 8 Forward time: 3.34s Backward time: 4.17s Step time: 34.93s\n", + "40128000 Examples seen. 
Accuracy: 0.8883 Error: 0.34774 Loss: 0.39430 Threads: 8 Forward time: 3.29s Backward time: 4.12s Step time: 35.87s\n", + "Starting Validation.\n", + "Epochs: 418 Examples seen:40128000 Validation Accuracy: 0.8187 Validation Error: 0.4340 Validation Loss: 0.6045 Total time: 818.35min\n", + "Epoch time: 1.7934 minutes. 500 epochs: 14.9446 hours.\n", + "Epochs: 418. Working time: 13.64 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "40160000 Examples seen. Accuracy: 0.8801 Error: 0.34200 Loss: 0.41010 Threads: 8 Forward time: 3.14s Backward time: 3.79s Step time: 35.10s\n", + "40192000 Examples seen. Accuracy: 0.8714 Error: 0.37281 Loss: 0.42011 Threads: 8 Forward time: 3.25s Backward time: 3.95s Step time: 36.46s\n", + "40224000 Examples seen. Accuracy: 0.8590 Error: 0.39060 Loss: 0.47819 Threads: 8 Forward time: 3.27s Backward time: 3.94s Step time: 36.10s\n", + "Starting Validation.\n", + "Epochs: 419 Examples seen:40224000 Validation Accuracy: 0.8216 Validation Error: 0.4555 Validation Loss: 0.5748 Total time: 820.19min\n", + "Epoch time: 1.8052 minutes. 500 epochs: 15.0433 hours.\n", + "Epochs: 419. Working time: 13.67 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named lily went to the park with her mom and dad. the sun .\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "40256000 Examples seen. Accuracy: 0.8689 Error: 0.30894 Loss: 0.29964 Threads: 8 Forward time: 3.29s Backward time: 3.94s Step time: 35.32s\n", + "40288000 Examples seen. 
Accuracy: 0.8703 Error: 0.32908 Loss: 0.34213 Threads: 8 Forward time: 3.23s Backward time: 3.89s Step time: 34.94s\n", + "40320000 Examples seen. Accuracy: 0.8775 Error: 0.25724 Loss: 0.27387 Threads: 8 Forward time: 3.43s Backward time: 4.25s Step time: 34.49s\n", + "Starting Validation.\n", + "Epochs: 420 Examples seen:40320000 Validation Accuracy: 0.8255 Validation Error: 0.4256 Validation Loss: 0.5941 Total time: 821.98min\n", + "Starting Testing.\n", + "Epochs: 420 Examples seen:40320000 Test Accuracy: 0.8255 Test Error: 0.4256 Test Loss: 0.5941 Total time: 822.02min\n", + "Epoch time: 1.7244 minutes. 500 epochs: 14.3700 hours.\n", + "Epochs: 420. Working time: 13.70 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they were playing .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "40352000 Examples seen. Accuracy: 0.8680 Error: 0.39343 Loss: 0.45549 Threads: 8 Forward time: 3.12s Backward time: 3.60s Step time: 35.17s\n", + "40384000 Examples seen. Accuracy: 0.8708 Error: 0.30531 Loss: 0.28765 Threads: 8 Forward time: 3.30s Backward time: 3.91s Step time: 34.40s\n", + "40416000 Examples seen. Accuracy: 0.8794 Error: 0.32796 Loss: 0.36431 Threads: 8 Forward time: 3.50s Backward time: 3.97s Step time: 36.16s\n", + "Starting Validation.\n", + "Epochs: 421 Examples seen:40416000 Validation Accuracy: 0.8235 Validation Error: 0.4425 Validation Loss: 0.5872 Total time: 823.82min\n", + "Epoch time: 1.8078 minutes. 500 epochs: 15.0654 hours.\n", + "Epochs: 421. Working time: 13.73 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they went to the s.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "40448000 Examples seen. Accuracy: 0.8661 Error: 0.41126 Loss: 0.49468 Threads: 8 Forward time: 3.22s Backward time: 3.90s Step time: 34.87s\n", + "40480000 Examples seen. Accuracy: 0.8541 Error: 0.36323 Loss: 0.45826 Threads: 8 Forward time: 3.23s Backward time: 3.72s Step time: 33.86s\n", + "40512000 Examples seen. Accuracy: 0.8559 Error: 0.34749 Loss: 0.39288 Threads: 8 Forward time: 3.09s Backward time: 3.50s Step time: 34.19s\n", + "Starting Validation.\n", + "Epochs: 422 Examples seen:40512000 Validation Accuracy: 0.8245 Validation Error: 0.4594 Validation Loss: 0.5859 Total time: 825.58min\n", + "Epoch time: 1.7096 minutes. 500 epochs: 14.2467 hours.\n", + "Epochs: 422. Working time: 13.76 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named mia was playing in her backyard. she saw a big box o.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "40544000 Examples seen. Accuracy: 0.8496 Error: 0.37054 Loss: 0.41581 Threads: 8 Forward time: 3.36s Backward time: 3.99s Step time: 33.76s\n", + "40576000 Examples seen. Accuracy: 0.8471 Error: 0.46136 Loss: 0.56449 Threads: 8 Forward time: 3.62s Backward time: 4.19s Step time: 34.14s\n", + "40608000 Examples seen. Accuracy: 0.8514 Error: 0.37040 Loss: 0.44009 Threads: 8 Forward time: 3.00s Backward time: 3.43s Step time: 34.21s\n", + "Starting Validation.\n", + "Epochs: 423 Examples seen:40608000 Validation Accuracy: 0.8251 Validation Error: 0.4478 Validation Loss: 0.5848 Total time: 827.33min\n", + "Epoch time: 1.7105 minutes. 500 epochs: 14.2542 hours.\n", + "Epochs: 423. Working time: 13.79 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. 
they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "40640000 Examples seen. Accuracy: 0.8543 Error: 0.34341 Loss: 0.35323 Threads: 8 Forward time: 3.20s Backward time: 3.67s Step time: 33.82s\n", + "40672000 Examples seen. Accuracy: 0.8781 Error: 0.32039 Loss: 0.33123 Threads: 8 Forward time: 3.10s Backward time: 3.73s Step time: 34.32s\n", + "40704000 Examples seen. Accuracy: 0.8698 Error: 0.41237 Loss: 0.47117 Threads: 8 Forward time: 3.01s Backward time: 3.40s Step time: 34.26s\n", + "Starting Validation.\n", + "Epochs: 424 Examples seen:40704000 Validation Accuracy: 0.8230 Validation Error: 0.4548 Validation Loss: 0.5811 Total time: 829.08min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.964 Min Weight: -1.001 Max Output: 0.746 Min Output: -0.762 TNNetPointwiseConv 81,1,32 Times: 0.17s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.746 Min Output: -0.762 TNNetPadXY 83,1,32 Times: 0.01s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.715 Min Weight: -0.710 Max Output: 4.333 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.14s 0.06s Parent:2\n", + "Layer 4 Max Output: 4.333 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.01s Parent:3\n", + "Layer 5 Max Output: 4.333 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.380 Min Weight: -0.385 Max Output: 4.643 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.45s 0.23s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.238 Min Weight: -0.243 Max Output: 3.420 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.45s 0.35s Parent:6\n", + "Layer 8 Max Output: 3.420 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.141 Min Weight: -0.194 Max Output: 2.139 Min Output: 0.000 
TNNetPointwiseConvReLU 1,1,1024 Times: 0.66s 0.30s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.260 Min Weight: -0.211 Max Output: 4.063 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.068 Min Weight: -0.610 Max Output: 16.928 Min Output: -4.433 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.02s Parent:10\n", + "Layer 12 Max Output: 0.987 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.7128 minutes. 500 epochs: 14.2733 hours.\n", + "Epochs: 424. Working time: 13.82 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "40736000 Examples seen. Accuracy: 0.8542 Error: 0.53908 Loss: 0.65106 Threads: 8 Forward time: 3.10s Backward time: 3.47s Step time: 33.94s\n", + "40768000 Examples seen. Accuracy: 0.8640 Error: 0.27361 Loss: 0.25558 Threads: 8 Forward time: 3.29s Backward time: 3.90s Step time: 34.00s\n", + "40800000 Examples seen. Accuracy: 0.8578 Error: 0.45291 Loss: 0.57011 Threads: 8 Forward time: 3.11s Backward time: 3.59s Step time: 34.43s\n", + "Starting Validation.\n", + "Epochs: 425 Examples seen:40800000 Validation Accuracy: 0.8249 Validation Error: 0.4593 Validation Loss: 0.5770 Total time: 830.83min\n", + "Epoch time: 1.7217 minutes. 500 epochs: 14.3475 hours.\n", + "Epochs: 425. Working time: 13.85 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they wanted to see.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "40832000 Examples seen. 
Accuracy: 0.8509 Error: 0.39640 Loss: 0.46382 Threads: 8 Forward time: 3.26s Backward time: 3.75s Step time: 33.71s\n", + "40864000 Examples seen. Accuracy: 0.8473 Error: 0.39135 Loss: 0.48606 Threads: 8 Forward time: 3.37s Backward time: 3.94s Step time: 34.35s\n", + "40896000 Examples seen. Accuracy: 0.8467 Error: 0.41625 Loss: 0.52518 Threads: 8 Forward time: 3.21s Backward time: 3.68s Step time: 34.14s\n", + "Starting Validation.\n", + "Epochs: 426 Examples seen:40896000 Validation Accuracy: 0.8266 Validation Error: 0.4566 Validation Loss: 0.5725 Total time: 832.57min\n", + "Epoch time: 1.7070 minutes. 500 epochs: 14.2250 hours.\n", + "Epochs: 426. Working time: 13.88 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named maria was playing in her room. she was having a bad .\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "40928000 Examples seen. Accuracy: 0.8461 Error: 0.38122 Loss: 0.45386 Threads: 8 Forward time: 3.41s Backward time: 3.97s Step time: 33.81s\n", + "40960000 Examples seen. Accuracy: 0.8452 Error: 0.37705 Loss: 0.43992 Threads: 8 Forward time: 3.30s Backward time: 3.87s Step time: 33.57s\n", + "40992000 Examples seen. Accuracy: 0.8449 Error: 0.42320 Loss: 0.50429 Threads: 8 Forward time: 3.09s Backward time: 3.49s Step time: 34.30s\n", + "Starting Validation.\n", + "Epochs: 427 Examples seen:40992000 Validation Accuracy: 0.8239 Validation Error: 0.4517 Validation Loss: 0.5816 Total time: 834.31min\n", + "Epoch time: 1.7152 minutes. 500 epochs: 14.2937 hours.\n", + "Epochs: 427. Working time: 13.91 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom and dad. they wer.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "41024000 Examples seen. Accuracy: 0.8590 Error: 0.37622 Loss: 0.55015 Threads: 8 Forward time: 3.30s Backward time: 3.89s Step time: 34.56s\n", + "41056000 Examples seen. Accuracy: 0.8693 Error: 0.30062 Loss: 0.37629 Threads: 8 Forward time: 3.15s Backward time: 3.80s Step time: 34.48s\n", + "41088000 Examples seen. Accuracy: 0.8798 Error: 0.32515 Loss: 0.34616 Threads: 8 Forward time: 3.20s Backward time: 3.66s Step time: 34.57s\n", + "Starting Validation.\n", + "Epochs: 428 Examples seen:41088000 Validation Accuracy: 0.8268 Validation Error: 0.4409 Validation Loss: 0.5949 Total time: 836.08min\n", + "Epoch time: 1.7284 minutes. 500 epochs: 14.4029 hours.\n", + "Epochs: 428. Working time: 13.93 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they wanted to exp.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "41120000 Examples seen. Accuracy: 0.8712 Error: 0.38384 Loss: 0.44860 Threads: 8 Forward time: 3.18s Backward time: 3.79s Step time: 34.85s\n", + "41152000 Examples seen. Accuracy: 0.8554 Error: 0.38530 Loss: 0.44475 Threads: 8 Forward time: 3.15s Backward time: 3.61s Step time: 34.25s\n", + "41184000 Examples seen. Accuracy: 0.8572 Error: 0.37404 Loss: 0.48443 Threads: 8 Forward time: 3.23s Backward time: 3.52s Step time: 34.01s\n", + "Starting Validation.\n", + "Epochs: 429 Examples seen:41184000 Validation Accuracy: 0.8253 Validation Error: 0.4537 Validation Loss: 0.5745 Total time: 837.84min\n", + "Epoch time: 1.7003 minutes. 500 epochs: 14.1692 hours.\n", + "Epochs: 429. Working time: 13.96 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. 
they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "41216000 Examples seen. Accuracy: 0.8512 Error: 0.40245 Loss: 0.48632 Threads: 8 Forward time: 3.07s Backward time: 3.62s Step time: 33.99s\n", + "41248000 Examples seen. Accuracy: 0.8488 Error: 0.41689 Loss: 0.52059 Threads: 8 Forward time: 3.18s Backward time: 3.66s Step time: 33.36s\n", + "41280000 Examples seen. Accuracy: 0.8482 Error: 0.41255 Loss: 0.50481 Threads: 8 Forward time: 3.07s Backward time: 3.51s Step time: 34.15s\n", + "Starting Validation.\n", + "Epochs: 430 Examples seen:41280000 Validation Accuracy: 0.8268 Validation Error: 0.4618 Validation Loss: 0.5761 Total time: 839.58min\n", + "Starting Testing.\n", + "Epochs: 430 Examples seen:41280000 Test Accuracy: 0.8268 Test Error: 0.4618 Test Loss: 0.5761 Total time: 839.61min\n", + "Epoch time: 1.7077 minutes. 500 epochs: 14.2312 hours.\n", + "Epochs: 430. Working time: 13.99 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. tim saw a big shin.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "41312000 Examples seen. Accuracy: 0.8487 Error: 0.45963 Loss: 0.54981 Threads: 8 Forward time: 3.15s Backward time: 3.63s Step time: 33.96s\n", + "41344000 Examples seen. Accuracy: 0.8484 Error: 0.35470 Loss: 0.43475 Threads: 8 Forward time: 3.13s Backward time: 3.63s Step time: 33.92s\n", + "41376000 Examples seen. Accuracy: 0.8473 Error: 0.40519 Loss: 0.52648 Threads: 8 Forward time: 3.07s Backward time: 3.48s Step time: 33.26s\n", + "Starting Validation.\n", + "VALIDATION RECORD! 
Saving NN at autosave.nn\n", + "Epochs: 431 Examples seen:41376000 Validation Accuracy: 0.8278 Validation Error: 0.4523 Validation Loss: 0.5766 Total time: 841.38min\n", + "Epoch time: 1.6632 minutes. 500 epochs: 13.8600 hours.\n", + "Epochs: 431. Working time: 14.02 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big bo.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "41408000 Examples seen. Accuracy: 0.8722 Error: 0.33281 Loss: 0.35128 Threads: 8 Forward time: 3.08s Backward time: 3.68s Step time: 34.23s\n", + "41440000 Examples seen. Accuracy: 0.8739 Error: 0.38888 Loss: 0.43075 Threads: 8 Forward time: 3.06s Backward time: 3.42s Step time: 35.64s\n", + "41472000 Examples seen. Accuracy: 0.8655 Error: 0.36918 Loss: 0.47596 Threads: 8 Forward time: 3.12s Backward time: 3.68s Step time: 33.95s\n", + "Starting Validation.\n", + "Epochs: 432 Examples seen:41472000 Validation Accuracy: 0.8245 Validation Error: 0.4515 Validation Loss: 0.5965 Total time: 843.15min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.960 Min Weight: -1.001 Max Output: 0.744 Min Output: -0.762 TNNetPointwiseConv 81,1,32 Times: 0.22s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.744 Min Output: -0.762 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.714 Min Weight: -0.718 Max Output: 4.318 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.16s 0.07s Parent:2\n", + "Layer 4 Max Output: 4.318 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 4.318 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.383 Min Weight: -0.384 Max Output: 4.626 Min Output: 
0.000 TNNetConvolutionReLU 27,1,384 Times: 0.38s 0.24s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.238 Min Weight: -0.243 Max Output: 3.517 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.47s 0.37s Parent:6\n", + "Layer 8 Max Output: 3.517 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.09s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.140 Min Weight: -0.196 Max Output: 2.217 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.68s 0.35s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.260 Min Weight: -0.211 Max Output: 4.154 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.069 Min Weight: -0.614 Max Output: 18.317 Min Output: -4.364 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.997 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.6976 minutes. 500 epochs: 14.1463 hours.\n", + "Epochs: 432. Working time: 14.05 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy was playing in her backyard. she saw a big bag .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "41504000 Examples seen. Accuracy: 0.8577 Error: 0.37647 Loss: 0.46463 Threads: 8 Forward time: 3.41s Backward time: 4.07s Step time: 34.05s\n", + "41536000 Examples seen. Accuracy: 0.8490 Error: 0.44968 Loss: 0.59407 Threads: 8 Forward time: 3.57s Backward time: 4.28s Step time: 34.31s\n", + "41568000 Examples seen. Accuracy: 0.8486 Error: 0.35772 Loss: 0.39533 Threads: 8 Forward time: 3.54s Backward time: 4.01s Step time: 35.12s\n", + "Starting Validation.\n", + "Epochs: 433 Examples seen:41568000 Validation Accuracy: 0.8253 Validation Error: 0.4606 Validation Loss: 0.5823 Total time: 844.92min\n", + "Epoch time: 1.7559 minutes. 
500 epochs: 14.6325 hours.\n", + "Epochs: 433. Working time: 14.08 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park. she saw a big box with her mo.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "41600000 Examples seen. Accuracy: 0.8462 Error: 0.36516 Loss: 0.39420 Threads: 8 Forward time: 3.18s Backward time: 3.64s Step time: 33.90s\n", + "41632000 Examples seen. Accuracy: 0.8638 Error: 0.39982 Loss: 0.45812 Threads: 8 Forward time: 3.54s Backward time: 4.16s Step time: 34.73s\n", + "41664000 Examples seen. Accuracy: 0.8578 Error: 0.34484 Loss: 0.37086 Threads: 8 Forward time: 3.30s Backward time: 3.70s Step time: 34.86s\n", + "Starting Validation.\n", + "Epochs: 434 Examples seen:41664000 Validation Accuracy: 0.8255 Validation Error: 0.4450 Validation Loss: 0.5900 Total time: 846.69min\n", + "Epoch time: 1.7430 minutes. 500 epochs: 14.5246 hours.\n", + "Epochs: 434. Working time: 14.11 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. the sun was shin.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "41696000 Examples seen. Accuracy: 0.8705 Error: 0.30904 Loss: 0.32306 Threads: 8 Forward time: 3.18s Backward time: 3.70s Step time: 35.30s\n", + "41728000 Examples seen. Accuracy: 0.8615 Error: 0.29318 Loss: 0.31389 Threads: 8 Forward time: 3.06s Backward time: 3.54s Step time: 34.87s\n", + "41760000 Examples seen. 
Accuracy: 0.8528 Error: 0.38776 Loss: 0.44938 Threads: 8 Forward time: 3.05s Backward time: 3.38s Step time: 34.85s\n", + "Starting Validation.\n", + "Epochs: 435 Examples seen:41760000 Validation Accuracy: 0.8226 Validation Error: 0.4568 Validation Loss: 0.5754 Total time: 848.48min\n", + "Epoch time: 1.7427 minutes. 500 epochs: 14.5229 hours.\n", + "Epochs: 435. Working time: 14.14 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. the shell and a .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "41792000 Examples seen. Accuracy: 0.8502 Error: 0.41199 Loss: 0.49425 Threads: 8 Forward time: 3.30s Backward time: 3.72s Step time: 34.73s\n", + "41824000 Examples seen. Accuracy: 0.8485 Error: 0.36217 Loss: 0.44118 Threads: 8 Forward time: 3.25s Backward time: 3.86s Step time: 35.45s\n", + "41856000 Examples seen. Accuracy: 0.8480 Error: 0.39024 Loss: 0.45944 Threads: 8 Forward time: 3.15s Backward time: 3.47s Step time: 35.72s\n", + "Starting Validation.\n", + "Epochs: 436 Examples seen:41856000 Validation Accuracy: 0.8232 Validation Error: 0.4601 Validation Loss: 0.5898 Total time: 850.29min\n", + "Epoch time: 1.7862 minutes. 500 epochs: 14.8846 hours.\n", + "Epochs: 436. Working time: 14.17 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they wanted to see.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "41888000 Examples seen. Accuracy: 0.8487 Error: 0.36234 Loss: 0.45796 Threads: 8 Forward time: 3.21s Backward time: 3.67s Step time: 33.78s\n", + "41920000 Examples seen. 
Accuracy: 0.8480 Error: 0.46383 Loss: 0.60152 Threads: 8 Forward time: 3.11s Backward time: 3.53s Step time: 33.36s\n", + "41952000 Examples seen. Accuracy: 0.8495 Error: 0.41296 Loss: 0.47370 Threads: 8 Forward time: 3.12s Backward time: 3.54s Step time: 33.59s\n", + "Starting Validation.\n", + "Epochs: 437 Examples seen:41952000 Validation Accuracy: 0.8253 Validation Error: 0.4584 Validation Loss: 0.5806 Total time: 852.01min\n", + "Epoch time: 1.6795 minutes. 500 epochs: 13.9954 hours.\n", + "Epochs: 437. Working time: 14.20 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mommy. they saw a big.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "41984000 Examples seen. Accuracy: 0.8501 Error: 0.40457 Loss: 0.51585 Threads: 8 Forward time: 3.34s Backward time: 3.94s Step time: 33.57s\n", + "42016000 Examples seen. Accuracy: 0.8489 Error: 0.39173 Loss: 0.49840 Threads: 8 Forward time: 3.28s Backward time: 3.66s Step time: 32.88s\n", + "42048000 Examples seen. Accuracy: 0.8464 Error: 0.42452 Loss: 0.52839 Threads: 8 Forward time: 3.21s Backward time: 3.60s Step time: 32.83s\n", + "Starting Validation.\n", + "Epochs: 438 Examples seen:42048000 Validation Accuracy: 0.8260 Validation Error: 0.4587 Validation Loss: 0.5761 Total time: 853.71min\n", + "Epoch time: 1.6416 minutes. 500 epochs: 13.6804 hours.\n", + "Epochs: 438. Working time: 14.23 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. she saw a big tr.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "42080000 Examples seen. 
Accuracy: 0.8698 Error: 0.30760 Loss: 0.33351 Threads: 8 Forward time: 3.18s Backward time: 3.70s Step time: 33.89s\n", + "42112000 Examples seen. Accuracy: 0.8625 Error: 0.38587 Loss: 0.47722 Threads: 8 Forward time: 3.29s Backward time: 3.92s Step time: 34.45s\n", + "42144000 Examples seen. Accuracy: 0.8531 Error: 0.34647 Loss: 0.37791 Threads: 8 Forward time: 3.03s Backward time: 3.42s Step time: 34.69s\n", + "Starting Validation.\n", + "Epochs: 439 Examples seen:42144000 Validation Accuracy: 0.8270 Validation Error: 0.4547 Validation Loss: 0.5710 Total time: 855.47min\n", + "Epoch time: 1.7343 minutes. 500 epochs: 14.4525 hours.\n", + "Epochs: 439. Working time: 14.26 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big tr.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "42176000 Examples seen. Accuracy: 0.8505 Error: 0.44143 Loss: 0.57760 Threads: 8 Forward time: 3.25s Backward time: 3.52s Step time: 34.39s\n", + "42208000 Examples seen. Accuracy: 0.8496 Error: 0.43039 Loss: 0.50234 Threads: 8 Forward time: 3.04s Backward time: 3.42s Step time: 32.97s\n", + "42240000 Examples seen. 
Accuracy: 0.8481 Error: 0.46303 Loss: 0.54904 Threads: 8 Forward time: 3.32s Backward time: 3.87s Step time: 33.43s\n", + "Starting Validation.\n", + "Epochs: 440 Examples seen:42240000 Validation Accuracy: 0.8262 Validation Error: 0.4580 Validation Loss: 0.5737 Total time: 857.19min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.961 Min Weight: -1.000 Max Output: 0.745 Min Output: -0.761 TNNetPointwiseConv 81,1,32 Times: 0.18s 0.10s Parent:0\n", + "Layer 2 Max Output: 0.745 Min Output: -0.761 TNNetPadXY 83,1,32 Times: 0.01s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.719 Min Weight: -0.720 Max Output: 4.327 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.15s 0.08s Parent:2\n", + "Layer 4 Max Output: 4.327 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.00s Parent:3\n", + "Layer 5 Max Output: 4.327 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.384 Min Weight: -0.381 Max Output: 4.632 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.43s 0.29s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.239 Min Weight: -0.245 Max Output: 3.552 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.58s 0.38s Parent:6\n", + "Layer 8 Max Output: 3.552 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.140 Min Weight: -0.193 Max Output: 2.012 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.74s 0.35s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.259 Min Weight: -0.211 Max Output: 4.301 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.06s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.071 Min Weight: -0.618 Max Output: 16.291 Min Output: -4.292 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.899 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Starting Testing.\n", + "Epochs: 440 Examples seen:42240000 Test Accuracy: 0.8262 Test Error: 0.4580 Test Loss: 0.5737 Total time: 857.23min\n", + "Epoch time: 1.6716 minutes. 500 epochs: 13.9300 hours.\n", + "Epochs: 440. Working time: 14.29 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big tr.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "42272000 Examples seen. Accuracy: 0.8480 Error: 0.44349 Loss: 0.53192 Threads: 8 Forward time: 3.34s Backward time: 3.72s Step time: 33.47s\n", + "42304000 Examples seen. Accuracy: 0.8641 Error: 0.28336 Loss: 0.28434 Threads: 8 Forward time: 3.08s Backward time: 3.48s Step time: 33.52s\n", + "42336000 Examples seen. Accuracy: 0.8697 Error: 0.43012 Loss: 0.50394 Threads: 8 Forward time: 3.25s Backward time: 3.83s Step time: 34.07s\n", + "Starting Validation.\n", + "Epochs: 441 Examples seen:42336000 Validation Accuracy: 0.8276 Validation Error: 0.4428 Validation Loss: 0.5804 Total time: 858.95min\n", + "Epoch time: 1.7035 minutes. 500 epochs: 14.1958 hours.\n", + "Epochs: 441. Working time: 14.32 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they were so he sa.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "42368000 Examples seen. Accuracy: 0.8571 Error: 0.30851 Loss: 0.29153 Threads: 8 Forward time: 3.09s Backward time: 3.43s Step time: 33.32s\n", + "42400000 Examples seen. Accuracy: 0.8760 Error: 0.33671 Loss: 0.36049 Threads: 8 Forward time: 3.08s Backward time: 3.69s Step time: 34.98s\n", + "42432000 Examples seen. 
Accuracy: 0.8676 Error: 0.37441 Loss: 0.40319 Threads: 8 Forward time: 3.06s Backward time: 3.49s Step time: 35.13s\n", + "Starting Validation.\n", + "Epochs: 442 Examples seen:42432000 Validation Accuracy: 0.8253 Validation Error: 0.4437 Validation Loss: 0.5727 Total time: 860.72min\n", + "Epoch time: 1.7563 minutes. 500 epochs: 14.6358 hours.\n", + "Epochs: 442. Working time: 14.35 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they wanted to see.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "42464000 Examples seen. Accuracy: 0.8641 Error: 0.31550 Loss: 0.35350 Threads: 8 Forward time: 2.97s Backward time: 3.40s Step time: 33.79s\n", + "42496000 Examples seen. Accuracy: 0.8742 Error: 0.37128 Loss: 0.43707 Threads: 8 Forward time: 3.29s Backward time: 3.93s Step time: 34.26s\n", + "42528000 Examples seen. Accuracy: 0.8631 Error: 0.40595 Loss: 0.50969 Threads: 8 Forward time: 3.27s Backward time: 3.80s Step time: 34.40s\n", + "Starting Validation.\n", + "Epochs: 443 Examples seen:42528000 Validation Accuracy: 0.8239 Validation Error: 0.4563 Validation Loss: 0.5792 Total time: 862.47min\n", + "Epoch time: 1.7201 minutes. 500 epochs: 14.3338 hours.\n", + "Epochs: 443. Working time: 14.37 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went for a walk. he saw a big box of toys. he wan.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "42560000 Examples seen. Accuracy: 0.8564 Error: 0.45545 Loss: 0.53089 Threads: 8 Forward time: 3.07s Backward time: 3.49s Step time: 33.90s\n", + "42592000 Examples seen. 
Accuracy: 0.8525 Error: 0.41087 Loss: 0.51258 Threads: 8 Forward time: 3.11s Backward time: 3.48s Step time: 33.25s\n", + "42624000 Examples seen. Accuracy: 0.8698 Error: 0.26969 Loss: 0.28130 Threads: 8 Forward time: 3.11s Backward time: 3.73s Step time: 33.31s\n", + "Starting Validation.\n", + "Epochs: 444 Examples seen:42624000 Validation Accuracy: 0.8251 Validation Error: 0.4387 Validation Loss: 0.6028 Total time: 864.18min\n", + "Epoch time: 1.6654 minutes. 500 epochs: 13.8783 hours.\n", + "Epochs: 444. Working time: 14.40 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "42656000 Examples seen. Accuracy: 0.8776 Error: 0.36652 Loss: 0.46233 Threads: 8 Forward time: 3.07s Backward time: 3.48s Step time: 33.71s\n", + "42688000 Examples seen. Accuracy: 0.8720 Error: 0.27040 Loss: 0.28061 Threads: 8 Forward time: 3.07s Backward time: 3.65s Step time: 31.48s\n", + "42720000 Examples seen. Accuracy: 0.8832 Error: 0.27013 Loss: 0.25904 Threads: 8 Forward time: 3.32s Backward time: 3.83s Step time: 31.77s\n", + "Starting Validation.\n", + "Epochs: 445 Examples seen:42720000 Validation Accuracy: 0.8247 Validation Error: 0.4398 Validation Loss: 0.6014 Total time: 865.84min\n", + "Epoch time: 1.5885 minutes. 500 epochs: 13.2379 hours.\n", + "Epochs: 445. Working time: 14.43 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they were so happy.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "42752000 Examples seen. 
Accuracy: 0.8909 Error: 0.29496 Loss: 0.29895 Threads: 8 Forward time: 3.08s Backward time: 3.45s Step time: 31.71s\n", + "42784000 Examples seen. Accuracy: 0.8783 Error: 0.32498 Loss: 0.37990 Threads: 8 Forward time: 3.07s Backward time: 3.33s Step time: 31.51s\n", + "42816000 Examples seen. Accuracy: 0.8721 Error: 0.28924 Loss: 0.28152 Threads: 8 Forward time: 3.00s Backward time: 3.21s Step time: 32.08s\n", + "Starting Validation.\n", + "Epochs: 446 Examples seen:42816000 Validation Accuracy: 0.8241 Validation Error: 0.4404 Validation Loss: 0.5869 Total time: 867.47min\n", + "Epoch time: 1.6038 minutes. 500 epochs: 13.3646 hours.\n", + "Epochs: 446. Working time: 14.46 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big tr.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "42848000 Examples seen. Accuracy: 0.8922 Error: 0.24549 Loss: 0.22278 Threads: 8 Forward time: 3.56s Backward time: 4.32s Step time: 33.29s\n", + "42880000 Examples seen. Accuracy: 0.8860 Error: 0.41174 Loss: 0.45586 Threads: 8 Forward time: 3.43s Backward time: 4.29s Step time: 34.92s\n", + "42912000 Examples seen. Accuracy: 0.8698 Error: 0.31297 Loss: 0.35217 Threads: 8 Forward time: 3.44s Backward time: 3.89s Step time: 35.41s\n", + "Starting Validation.\n", + "Epochs: 447 Examples seen:42912000 Validation Accuracy: 0.8216 Validation Error: 0.4485 Validation Loss: 0.5923 Total time: 869.24min\n", + "Epoch time: 1.7707 minutes. 500 epochs: 14.7562 hours.\n", + "Epochs: 447. Working time: 14.49 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily was playing in her garden. she saw something in.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "Max prediction pos is: 81\n", + "42944000 Examples seen. Accuracy: 0.8781 Error: 0.34749 Loss: 0.43999 Threads: 8 Forward time: 3.21s Backward time: 3.72s Step time: 35.27s\n", + "42976000 Examples seen. Accuracy: 0.8574 Error: 0.37528 Loss: 0.41049 Threads: 8 Forward time: 3.40s Backward time: 4.03s Step time: 35.24s\n", + "43008000 Examples seen. Accuracy: 0.8822 Error: 0.39807 Loss: 0.55039 Threads: 8 Forward time: 3.17s Backward time: 3.75s Step time: 34.11s\n", + "Starting Validation.\n", + "Epochs: 448 Examples seen:43008000 Validation Accuracy: 0.8182 Validation Error: 0.4331 Validation Loss: 0.6116 Total time: 871.03min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.975 Min Weight: -1.005 Max Output: 0.751 Min Output: -0.764 TNNetPointwiseConv 81,1,32 Times: 0.19s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.751 Min Output: -0.764 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.726 Min Weight: -0.727 Max Output: 4.425 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.14s 0.06s Parent:2\n", + "Layer 4 Max Output: 4.425 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 4.425 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.385 Min Weight: -0.384 Max Output: 4.763 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.40s 0.24s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.241 Min Weight: -0.245 Max Output: 3.699 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.51s 0.41s Parent:6\n", + "Layer 8 Max Output: 3.699 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.142 Min Weight: -0.202 Max Output: 2.204 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.71s 0.37s Parent:8\n", + "Layer 10 Neurons:128 Max 
Weight: 0.259 Min Weight: -0.212 Max Output: 4.874 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.04s 0.03s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.080 Min Weight: -0.623 Max Output: 20.891 Min Output: -5.209 TNNetFullConnectLinear 128,1,1 Times: 0.02s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.996 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.7053 minutes. 500 epochs: 14.2104 hours.\n", + "Epochs: 448. Working time: 14.52 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "43040000 Examples seen. Accuracy: 0.8611 Error: 0.38833 Loss: 0.50563 Threads: 8 Forward time: 3.36s Backward time: 3.94s Step time: 34.14s\n", + "43072000 Examples seen. Accuracy: 0.8522 Error: 0.50352 Loss: 0.60409 Threads: 8 Forward time: 3.09s Backward time: 3.42s Step time: 33.30s\n", + "43104000 Examples seen. Accuracy: 0.8512 Error: 0.33443 Loss: 0.43434 Threads: 8 Forward time: 3.08s Backward time: 3.58s Step time: 32.76s\n", + "Starting Validation.\n", + "Epochs: 449 Examples seen:43104000 Validation Accuracy: 0.8226 Validation Error: 0.4448 Validation Loss: 0.5838 Total time: 872.74min\n", + "Epoch time: 1.6381 minutes. 500 epochs: 13.6508 hours.\n", + "Epochs: 449. Working time: 14.55 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big tr.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "43136000 Examples seen. 
Accuracy: 0.8478 Error: 0.36731 Loss: 0.44671 Threads: 8 Forward time: 3.39s Backward time: 4.07s Step time: 34.73s\n", + "43168000 Examples seen. Accuracy: 0.8628 Error: 0.28720 Loss: 0.30455 Threads: 8 Forward time: 3.12s Backward time: 3.54s Step time: 34.17s\n", + "43200000 Examples seen. Accuracy: 0.8814 Error: 0.36878 Loss: 0.43290 Threads: 8 Forward time: 3.23s Backward time: 3.78s Step time: 34.47s\n", + "Starting Validation.\n", + "Epochs: 450 Examples seen:43200000 Validation Accuracy: 0.8199 Validation Error: 0.4438 Validation Loss: 0.5945 Total time: 874.51min\n", + "Starting Testing.\n", + "Epochs: 450 Examples seen:43200000 Test Accuracy: 0.8199 Test Error: 0.4438 Test Loss: 0.5945 Total time: 874.54min\n", + "Epoch time: 1.7237 minutes. 500 epochs: 14.3638 hours.\n", + "Epochs: 450. Working time: 14.58 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. tim saw a big truc.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "43232000 Examples seen. Accuracy: 0.8742 Error: 0.41795 Loss: 0.47871 Threads: 8 Forward time: 3.30s Backward time: 3.84s Step time: 35.53s\n", + "43264000 Examples seen. Accuracy: 0.8746 Error: 0.26116 Loss: 0.24343 Threads: 8 Forward time: 3.43s Backward time: 3.99s Step time: 35.81s\n", + "43296000 Examples seen. Accuracy: 0.8948 Error: 0.29591 Loss: 0.31252 Threads: 8 Forward time: 3.28s Backward time: 4.05s Step time: 36.17s\n", + "Starting Validation.\n", + "Epochs: 451 Examples seen:43296000 Validation Accuracy: 0.8228 Validation Error: 0.4330 Validation Loss: 0.6180 Total time: 876.38min\n", + "Epoch time: 1.8087 minutes. 500 epochs: 15.0721 hours.\n", + "Epochs: 451. Working time: 14.61 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside a.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tru.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "43328000 Examples seen. Accuracy: 0.8689 Error: 0.34998 Loss: 0.39323 Threads: 8 Forward time: 3.24s Backward time: 3.83s Step time: 34.33s\n", + "43360000 Examples seen. Accuracy: 0.8552 Error: 0.43308 Loss: 0.52579 Threads: 8 Forward time: 3.30s Backward time: 3.93s Step time: 34.82s\n", + "43392000 Examples seen. Accuracy: 0.8485 Error: 0.43799 Loss: 0.48830 Threads: 8 Forward time: 3.03s Backward time: 3.38s Step time: 34.60s\n", + "Starting Validation.\n", + "Epochs: 452 Examples seen:43392000 Validation Accuracy: 0.8195 Validation Error: 0.4622 Validation Loss: 0.5857 Total time: 878.15min\n", + "Epoch time: 1.7298 minutes. 500 epochs: 14.4150 hours.\n", + "Epochs: 452. Working time: 14.64 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "43424000 Examples seen. Accuracy: 0.8698 Error: 0.29247 Loss: 0.27941 Threads: 8 Forward time: 3.29s Backward time: 3.79s Step time: 33.85s\n", + "43456000 Examples seen. Accuracy: 0.8853 Error: 0.29488 Loss: 0.34071 Threads: 8 Forward time: 3.33s Backward time: 4.10s Step time: 35.25s\n", + "43488000 Examples seen. Accuracy: 0.8969 Error: 0.34062 Loss: 0.38104 Threads: 8 Forward time: 3.17s Backward time: 3.67s Step time: 34.73s\n", + "Starting Validation.\n", + "Epochs: 453 Examples seen:43488000 Validation Accuracy: 0.8172 Validation Error: 0.4332 Validation Loss: 0.6210 Total time: 879.92min\n", + "Epoch time: 1.7363 minutes. 500 epochs: 14.4688 hours.\n", + "Epochs: 453. 
Working time: 14.67 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "43520000 Examples seen. Accuracy: 0.8930 Error: 0.32696 Loss: 0.36471 Threads: 8 Forward time: 3.08s Backward time: 3.63s Step time: 35.23s\n", + "43552000 Examples seen. Accuracy: 0.9036 Error: 0.34638 Loss: 0.45223 Threads: 8 Forward time: 3.18s Backward time: 3.83s Step time: 34.90s\n", + "43584000 Examples seen. Accuracy: 0.8667 Error: 0.39337 Loss: 0.47459 Threads: 8 Forward time: 3.18s Backward time: 3.68s Step time: 34.92s\n", + "Starting Validation.\n", + "Epochs: 454 Examples seen:43584000 Validation Accuracy: 0.8212 Validation Error: 0.4578 Validation Loss: 0.5839 Total time: 881.72min\n", + "Epoch time: 1.7459 minutes. 500 epochs: 14.5488 hours.\n", + "Epochs: 454. Working time: 14.70 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "43616000 Examples seen. Accuracy: 0.8548 Error: 0.33806 Loss: 0.43043 Threads: 8 Forward time: 3.37s Backward time: 4.11s Step time: 32.96s\n", + "43648000 Examples seen. Accuracy: 0.8479 Error: 0.42268 Loss: 0.54870 Threads: 8 Forward time: 3.25s Backward time: 3.89s Step time: 33.38s\n", + "43680000 Examples seen. 
Accuracy: 0.8494 Error: 0.36199 Loss: 0.51623 Threads: 8 Forward time: 3.11s Backward time: 3.68s Step time: 33.32s\n", + "Starting Validation.\n", + "Epochs: 455 Examples seen:43680000 Validation Accuracy: 0.8218 Validation Error: 0.4555 Validation Loss: 0.5801 Total time: 883.42min\n", + "Epoch time: 1.6659 minutes. 500 epochs: 13.8825 hours.\n", + "Epochs: 455. Working time: 14.72 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named sarah was playing in her backyard when her garden. s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "43712000 Examples seen. Accuracy: 0.8485 Error: 0.40985 Loss: 0.55635 Threads: 8 Forward time: 3.13s Backward time: 3.49s Step time: 33.29s\n", + "43744000 Examples seen. Accuracy: 0.8478 Error: 0.37985 Loss: 0.44550 Threads: 8 Forward time: 3.16s Backward time: 3.57s Step time: 33.69s\n", + "43776000 Examples seen. 
Accuracy: 0.8483 Error: 0.44474 Loss: 0.54515 Threads: 8 Forward time: 3.26s Backward time: 3.66s Step time: 33.25s\n", + "Starting Validation.\n", + "Epochs: 456 Examples seen:43776000 Validation Accuracy: 0.8230 Validation Error: 0.4576 Validation Loss: 0.5856 Total time: 885.13min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.973 Min Weight: -1.010 Max Output: 0.750 Min Output: -0.766 TNNetPointwiseConv 81,1,32 Times: 0.20s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.750 Min Output: -0.766 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.732 Min Weight: -0.732 Max Output: 4.451 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.15s 0.06s Parent:2\n", + "Layer 4 Max Output: 4.451 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 4.451 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.391 Min Weight: -0.385 Max Output: 4.782 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.39s 0.26s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.238 Min Weight: -0.249 Max Output: 3.725 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.49s 0.38s Parent:6\n", + "Layer 8 Max Output: 3.725 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.140 Min Weight: -0.203 Max Output: 2.165 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.72s 0.34s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.261 Min Weight: -0.212 Max Output: 4.066 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.03s 0.06s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.083 Min Weight: -0.625 Max Output: 16.574 Min Output: -4.235 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.980 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.6624 minutes. 500 epochs: 13.8537 hours.\n", + "Epochs: 456. Working time: 14.75 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lucy went to the park. she saw a big stretch friend .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "43808000 Examples seen. Accuracy: 0.8509 Error: 0.37006 Loss: 0.41651 Threads: 8 Forward time: 3.15s Backward time: 3.60s Step time: 33.82s\n", + "43840000 Examples seen. Accuracy: 0.8654 Error: 0.27414 Loss: 0.26524 Threads: 8 Forward time: 3.10s Backward time: 3.52s Step time: 34.03s\n", + "43872000 Examples seen. Accuracy: 0.8692 Error: 0.36495 Loss: 0.41010 Threads: 8 Forward time: 3.27s Backward time: 3.74s Step time: 34.12s\n", + "Starting Validation.\n", + "Epochs: 457 Examples seen:43872000 Validation Accuracy: 0.8235 Validation Error: 0.4480 Validation Loss: 0.5852 Total time: 886.88min\n", + "Epoch time: 1.7059 minutes. 500 epochs: 14.2162 hours.\n", + "Epochs: 457. Working time: 14.78 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. the shell with h.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "43904000 Examples seen. Accuracy: 0.8584 Error: 0.42715 Loss: 0.50246 Threads: 8 Forward time: 3.12s Backward time: 3.43s Step time: 33.98s\n", + "43936000 Examples seen. Accuracy: 0.8513 Error: 0.42794 Loss: 0.54702 Threads: 8 Forward time: 3.22s Backward time: 3.47s Step time: 33.16s\n", + "43968000 Examples seen. 
Accuracy: 0.8572 Error: 0.34900 Loss: 0.39342 Threads: 8 Forward time: 3.08s Backward time: 3.66s Step time: 33.05s\n", + "Starting Validation.\n", + "Epochs: 458 Examples seen:43968000 Validation Accuracy: 0.8260 Validation Error: 0.4343 Validation Loss: 0.5875 Total time: 888.59min\n", + "Epoch time: 1.6523 minutes. 500 epochs: 13.7692 hours.\n", + "Epochs: 458. Working time: 14.81 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they saw many an.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "44000000 Examples seen. Accuracy: 0.8512 Error: 0.44594 Loss: 0.54169 Threads: 8 Forward time: 3.11s Backward time: 3.47s Step time: 33.18s\n", + "44032000 Examples seen. Accuracy: 0.8504 Error: 0.42549 Loss: 0.54906 Threads: 8 Forward time: 3.25s Backward time: 3.60s Step time: 33.63s\n", + "44064000 Examples seen. Accuracy: 0.8559 Error: 0.28761 Loss: 0.28679 Threads: 8 Forward time: 3.06s Backward time: 3.49s Step time: 33.42s\n", + "Starting Validation.\n", + "Epochs: 459 Examples seen:44064000 Validation Accuracy: 0.8228 Validation Error: 0.4410 Validation Loss: 0.5787 Total time: 890.30min\n", + "Epoch time: 1.6711 minutes. 500 epochs: 13.9254 hours.\n", + "Epochs: 459. Working time: 14.84 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big bl.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "44096000 Examples seen. Accuracy: 0.8665 Error: 0.33176 Loss: 0.35130 Threads: 8 Forward time: 3.11s Backward time: 3.48s Step time: 33.70s\n", + "44128000 Examples seen. 
Accuracy: 0.8832 Error: 0.38423 Loss: 0.43780 Threads: 8 Forward time: 3.26s Backward time: 3.97s Step time: 33.73s\n", + "44160000 Examples seen. Accuracy: 0.8733 Error: 0.36795 Loss: 0.45304 Threads: 8 Forward time: 3.15s Backward time: 3.61s Step time: 34.51s\n", + "Starting Validation.\n", + "Epochs: 460 Examples seen:44160000 Validation Accuracy: 0.8245 Validation Error: 0.4424 Validation Loss: 0.5747 Total time: 892.04min\n", + "Starting Testing.\n", + "Epochs: 460 Examples seen:44160000 Test Accuracy: 0.8245 Test Error: 0.4424 Test Loss: 0.5747 Total time: 892.08min\n", + "Epoch time: 1.7256 minutes. 500 epochs: 14.3800 hours.\n", + "Epochs: 460. Working time: 14.87 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big po.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "44192000 Examples seen. Accuracy: 0.8890 Error: 0.35429 Loss: 0.34196 Threads: 8 Forward time: 3.26s Backward time: 3.83s Step time: 34.96s\n", + "44224000 Examples seen. Accuracy: 0.8853 Error: 0.38396 Loss: 0.41729 Threads: 8 Forward time: 3.05s Backward time: 3.47s Step time: 34.73s\n", + "44256000 Examples seen. Accuracy: 0.8776 Error: 0.37210 Loss: 0.40225 Threads: 8 Forward time: 3.65s Backward time: 4.34s Step time: 34.78s\n", + "Starting Validation.\n", + "Epochs: 461 Examples seen:44256000 Validation Accuracy: 0.8241 Validation Error: 0.4325 Validation Loss: 0.5805 Total time: 893.86min\n", + "Epoch time: 1.7392 minutes. 500 epochs: 14.4929 hours.\n", + "Epochs: 461. Working time: 14.90 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily was playing in her backyard. she saw a big bag .\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "44288000 Examples seen. Accuracy: 0.8566 Error: 0.42166 Loss: 0.49689 Threads: 8 Forward time: 3.23s Backward time: 3.69s Step time: 34.32s\n", + "44320000 Examples seen. Accuracy: 0.8517 Error: 0.40719 Loss: 0.56037 Threads: 8 Forward time: 3.07s Backward time: 3.45s Step time: 34.64s\n", + "44352000 Examples seen. Accuracy: 0.8509 Error: 0.42150 Loss: 0.48393 Threads: 8 Forward time: 3.33s Backward time: 3.87s Step time: 34.03s\n", + "Starting Validation.\n", + "Epochs: 462 Examples seen:44352000 Validation Accuracy: 0.8258 Validation Error: 0.4515 Validation Loss: 0.5797 Total time: 895.62min\n", + "Epoch time: 1.7017 minutes. 500 epochs: 14.1808 hours.\n", + "Epochs: 462. Working time: 14.93 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. she saw a big tr.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "44384000 Examples seen. Accuracy: 0.8474 Error: 0.45718 Loss: 0.53082 Threads: 8 Forward time: 3.10s Backward time: 3.57s Step time: 34.55s\n", + "44416000 Examples seen. Accuracy: 0.8480 Error: 0.41334 Loss: 0.52851 Threads: 8 Forward time: 3.34s Backward time: 3.80s Step time: 33.72s\n", + "44448000 Examples seen. Accuracy: 0.8457 Error: 0.36578 Loss: 0.37344 Threads: 8 Forward time: 3.42s Backward time: 3.96s Step time: 34.01s\n", + "Starting Validation.\n", + "Epochs: 463 Examples seen:44448000 Validation Accuracy: 0.8210 Validation Error: 0.4600 Validation Loss: 0.5684 Total time: 897.37min\n", + "Epoch time: 1.7003 minutes. 500 epochs: 14.1692 hours.\n", + "Epochs: 463. Working time: 14.96 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy was walking in the park. 
she saw a big box with.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "44480000 Examples seen. Accuracy: 0.8478 Error: 0.47747 Loss: 0.60775 Threads: 8 Forward time: 2.94s Backward time: 3.45s Step time: 33.92s\n", + "44512000 Examples seen. Accuracy: 0.8535 Error: 0.36171 Loss: 0.40892 Threads: 8 Forward time: 3.13s Backward time: 3.57s Step time: 33.31s\n", + "44544000 Examples seen. Accuracy: 0.8506 Error: 0.35805 Loss: 0.41540 Threads: 8 Forward time: 3.24s Backward time: 3.73s Step time: 33.58s\n", + "Starting Validation.\n", + "Epochs: 464 Examples seen:44544000 Validation Accuracy: 0.8251 Validation Error: 0.4495 Validation Loss: 0.5730 Total time: 899.10min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.967 Min Weight: -1.008 Max Output: 0.747 Min Output: -0.765 TNNetPointwiseConv 81,1,32 Times: 0.21s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.747 Min Output: -0.765 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.728 Min Weight: -0.733 Max Output: 4.461 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.16s 0.06s Parent:2\n", + "Layer 4 Max Output: 4.461 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 4.461 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.390 Min Weight: -0.389 Max Output: 4.931 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.40s 0.27s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.241 Min Weight: -0.250 Max Output: 3.532 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.54s 0.39s Parent:6\n", + "Layer 8 Max Output: 3.532 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.143 Min Weight: -0.199 Max Output: 2.208 Min Output: 0.000 
TNNetPointwiseConvReLU 1,1,1024 Times: 0.71s 0.34s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.260 Min Weight: -0.213 Max Output: 3.618 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.05s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.085 Min Weight: -0.627 Max Output: 14.584 Min Output: -4.093 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.866 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.6788 minutes. 500 epochs: 13.9896 hours.\n", + "Epochs: 464. Working time: 14.98 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily was playing in her backyard. she saw a big ball.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "44576000 Examples seen. Accuracy: 0.8487 Error: 0.49738 Loss: 0.64243 Threads: 8 Forward time: 3.15s Backward time: 3.50s Step time: 34.12s\n", + "44608000 Examples seen. Accuracy: 0.8723 Error: 0.34390 Loss: 0.36344 Threads: 8 Forward time: 3.18s Backward time: 3.71s Step time: 34.22s\n", + "44640000 Examples seen. Accuracy: 0.8608 Error: 0.38853 Loss: 0.47021 Threads: 8 Forward time: 3.48s Backward time: 4.06s Step time: 33.77s\n", + "Starting Validation.\n", + "Epochs: 465 Examples seen:44640000 Validation Accuracy: 0.8241 Validation Error: 0.4522 Validation Loss: 0.5739 Total time: 900.84min\n", + "Epoch time: 1.6884 minutes. 500 epochs: 14.0696 hours.\n", + "Epochs: 465. Working time: 15.01 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "44672000 Examples seen. 
Accuracy: 0.8519 Error: 0.40977 Loss: 0.47976 Threads: 8 Forward time: 3.05s Backward time: 3.38s Step time: 33.82s\n", + "44704000 Examples seen. Accuracy: 0.8496 Error: 0.39463 Loss: 0.51113 Threads: 8 Forward time: 3.46s Backward time: 4.11s Step time: 33.41s\n", + "44736000 Examples seen. Accuracy: 0.8490 Error: 0.41560 Loss: 0.50195 Threads: 8 Forward time: 3.44s Backward time: 3.98s Step time: 33.71s\n", + "Starting Validation.\n", + "Epochs: 466 Examples seen:44736000 Validation Accuracy: 0.8270 Validation Error: 0.4530 Validation Loss: 0.5621 Total time: 902.57min\n", + "Epoch time: 1.6854 minutes. 500 epochs: 14.0446 hours.\n", + "Epochs: 466. Working time: 15.04 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named mia went to the park with her mom. she saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "44768000 Examples seen. Accuracy: 0.8514 Error: 0.37541 Loss: 0.41711 Threads: 8 Forward time: 3.30s Backward time: 3.80s Step time: 33.52s\n", + "44800000 Examples seen. Accuracy: 0.8501 Error: 0.45503 Loss: 0.60081 Threads: 8 Forward time: 3.50s Backward time: 4.23s Step time: 33.59s\n", + "44832000 Examples seen. Accuracy: 0.8473 Error: 0.47698 Loss: 0.68097 Threads: 8 Forward time: 3.13s Backward time: 3.56s Step time: 33.26s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 467 Examples seen:44832000 Validation Accuracy: 0.8280 Validation Error: 0.4625 Validation Loss: 0.5717 Total time: 904.32min\n", + "Epoch time: 1.6631 minutes. 500 epochs: 13.8588 hours.\n", + "Epochs: 467. Working time: 15.07 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. 
they saw a big blu.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "44864000 Examples seen. Accuracy: 0.8466 Error: 0.37468 Loss: 0.42755 Threads: 8 Forward time: 3.11s Backward time: 3.57s Step time: 33.65s\n", + "44896000 Examples seen. Accuracy: 0.8470 Error: 0.43060 Loss: 0.53716 Threads: 8 Forward time: 3.10s Backward time: 3.47s Step time: 33.81s\n", + "44928000 Examples seen. Accuracy: 0.8479 Error: 0.38944 Loss: 0.44913 Threads: 8 Forward time: 3.09s Backward time: 3.53s Step time: 33.47s\n", + "Starting Validation.\n", + "Epochs: 468 Examples seen:44928000 Validation Accuracy: 0.8243 Validation Error: 0.4497 Validation Loss: 0.5745 Total time: 906.05min\n", + "Epoch time: 1.6737 minutes. 500 epochs: 13.9479 hours.\n", + "Epochs: 468. Working time: 15.10 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "44960000 Examples seen. Accuracy: 0.8479 Error: 0.34197 Loss: 0.41751 Threads: 8 Forward time: 3.52s Backward time: 4.02s Step time: 34.39s\n", + "44992000 Examples seen. Accuracy: 0.8729 Error: 0.28078 Loss: 0.29016 Threads: 8 Forward time: 3.40s Backward time: 3.89s Step time: 34.96s\n", + "45024000 Examples seen. Accuracy: 0.8627 Error: 0.35139 Loss: 0.46233 Threads: 8 Forward time: 3.03s Backward time: 3.46s Step time: 33.83s\n", + "Starting Validation.\n", + "Epochs: 469 Examples seen:45024000 Validation Accuracy: 0.8266 Validation Error: 0.4536 Validation Loss: 0.5820 Total time: 907.81min\n", + "Epoch time: 1.6914 minutes. 500 epochs: 14.0950 hours.\n", + "Epochs: 469. Working time: 15.13 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "45056000 Examples seen. Accuracy: 0.8550 Error: 0.41198 Loss: 0.53260 Threads: 8 Forward time: 3.23s Backward time: 3.81s Step time: 33.49s\n", + "45088000 Examples seen. Accuracy: 0.8599 Error: 0.42163 Loss: 0.54147 Threads: 8 Forward time: 3.12s Backward time: 3.51s Step time: 33.63s\n", + "45120000 Examples seen. Accuracy: 0.8519 Error: 0.39378 Loss: 0.49788 Threads: 8 Forward time: 3.12s Backward time: 3.46s Step time: 34.35s\n", + "Starting Validation.\n", + "Epochs: 470 Examples seen:45120000 Validation Accuracy: 0.8270 Validation Error: 0.4518 Validation Loss: 0.5703 Total time: 909.54min\n", + "Starting Testing.\n", + "Epochs: 470 Examples seen:45120000 Test Accuracy: 0.8270 Test Error: 0.4518 Test Loss: 0.5703 Total time: 909.58min\n", + "Epoch time: 1.7176 minutes. 500 epochs: 14.3137 hours.\n", + "Epochs: 470. Working time: 15.16 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "45152000 Examples seen. Accuracy: 0.8745 Error: 0.25855 Loss: 0.27158 Threads: 8 Forward time: 3.61s Backward time: 4.44s Step time: 33.79s\n", + "45184000 Examples seen. Accuracy: 0.8781 Error: 0.38331 Loss: 0.42251 Threads: 8 Forward time: 3.29s Backward time: 3.92s Step time: 34.93s\n", + "45216000 Examples seen. 
Accuracy: 0.8722 Error: 0.41408 Loss: 0.49642 Threads: 8 Forward time: 3.39s Backward time: 4.11s Step time: 34.01s\n", + "Starting Validation.\n", + "Epochs: 471 Examples seen:45216000 Validation Accuracy: 0.8243 Validation Error: 0.4397 Validation Loss: 0.5840 Total time: 911.34min\n", + "Epoch time: 1.7004 minutes. 500 epochs: 14.1704 hours.\n", + "Epochs: 471. Working time: 15.19 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom and dad. she saw .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "45248000 Examples seen. Accuracy: 0.8578 Error: 0.35077 Loss: 0.34506 Threads: 8 Forward time: 3.11s Backward time: 3.53s Step time: 33.31s\n", + "45280000 Examples seen. Accuracy: 0.8542 Error: 0.38017 Loss: 0.46233 Threads: 8 Forward time: 3.24s Backward time: 3.74s Step time: 33.88s\n", + "45312000 Examples seen. 
Accuracy: 0.8526 Error: 0.38830 Loss: 0.48033 Threads: 8 Forward time: 3.09s Backward time: 3.53s Step time: 33.71s\n", + "Starting Validation.\n", + "Epochs: 472 Examples seen:45312000 Validation Accuracy: 0.8243 Validation Error: 0.4455 Validation Loss: 0.5745 Total time: 913.06min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.970 Min Weight: -1.008 Max Output: 0.749 Min Output: -0.765 TNNetPointwiseConv 81,1,32 Times: 0.21s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.749 Min Output: -0.765 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.729 Min Weight: -0.741 Max Output: 4.498 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.13s 0.07s Parent:2\n", + "Layer 4 Max Output: 4.498 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.02s 0.00s Parent:3\n", + "Layer 5 Max Output: 4.498 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.393 Min Weight: -0.390 Max Output: 4.622 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.39s 0.24s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.241 Min Weight: -0.248 Max Output: 3.461 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.51s 0.37s Parent:6\n", + "Layer 8 Max Output: 3.461 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.143 Min Weight: -0.203 Max Output: 2.077 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.67s 0.32s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.259 Min Weight: -0.213 Max Output: 4.059 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.03s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.086 Min Weight: -0.627 Max Output: 16.213 Min Output: -4.053 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.963 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.6854 minutes. 500 epochs: 14.0446 hours.\n", + "Epochs: 472. Working time: 15.22 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "45344000 Examples seen. Accuracy: 0.8516 Error: 0.44614 Loss: 0.53711 Threads: 8 Forward time: 2.99s Backward time: 3.36s Step time: 33.38s\n", + "45376000 Examples seen. Accuracy: 0.8516 Error: 0.36628 Loss: 0.45520 Threads: 8 Forward time: 3.41s Backward time: 3.87s Step time: 33.60s\n", + "45408000 Examples seen. Accuracy: 0.8532 Error: 0.37547 Loss: 0.47603 Threads: 8 Forward time: 3.11s Backward time: 3.44s Step time: 33.44s\n", + "Starting Validation.\n", + "Epochs: 473 Examples seen:45408000 Validation Accuracy: 0.8260 Validation Error: 0.4389 Validation Loss: 0.5736 Total time: 914.78min\n", + "Epoch time: 1.6721 minutes. 500 epochs: 13.9342 hours.\n", + "Epochs: 473. Working time: 15.25 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "45440000 Examples seen. Accuracy: 0.8501 Error: 0.42449 Loss: 0.54298 Threads: 8 Forward time: 3.14s Backward time: 3.38s Step time: 33.39s\n", + "45472000 Examples seen. Accuracy: 0.8514 Error: 0.36107 Loss: 0.37924 Threads: 8 Forward time: 3.18s Backward time: 3.55s Step time: 32.96s\n", + "45504000 Examples seen. 
Accuracy: 0.8494 Error: 0.43548 Loss: 0.53709 Threads: 8 Forward time: 3.42s Backward time: 3.99s Step time: 32.98s\n", + "Starting Validation.\n", + "Epochs: 474 Examples seen:45504000 Validation Accuracy: 0.8241 Validation Error: 0.4552 Validation Loss: 0.5802 Total time: 916.47min\n", + "Epoch time: 1.6491 minutes. 500 epochs: 13.7421 hours.\n", + "Epochs: 474. Working time: 15.27 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lucy went to the park. she saw a big pile of the sho.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "45536000 Examples seen. Accuracy: 0.8464 Error: 0.36257 Loss: 0.36972 Threads: 8 Forward time: 3.03s Backward time: 3.38s Step time: 32.72s\n", + "45568000 Examples seen. Accuracy: 0.8627 Error: 0.37836 Loss: 0.42267 Threads: 8 Forward time: 3.18s Backward time: 3.46s Step time: 33.43s\n", + "45600000 Examples seen. Accuracy: 0.8537 Error: 0.40715 Loss: 0.50736 Threads: 8 Forward time: 3.58s Backward time: 4.31s Step time: 32.90s\n", + "Starting Validation.\n", + "Epochs: 475 Examples seen:45600000 Validation Accuracy: 0.8249 Validation Error: 0.4605 Validation Loss: 0.5879 Total time: 918.17min\n", + "Epoch time: 1.6449 minutes. 500 epochs: 13.7075 hours.\n", + "Epochs: 475. Working time: 15.30 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park. she saw a big bucket on the s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "45632000 Examples seen. Accuracy: 0.8497 Error: 0.39931 Loss: 0.46755 Threads: 8 Forward time: 3.28s Backward time: 3.78s Step time: 33.64s\n", + "45664000 Examples seen. 
Accuracy: 0.8488 Error: 0.44477 Loss: 0.51571 Threads: 8 Forward time: 3.51s Backward time: 4.01s Step time: 34.01s\n", + "45696000 Examples seen. Accuracy: 0.8479 Error: 0.45770 Loss: 0.57663 Threads: 8 Forward time: 3.25s Backward time: 3.76s Step time: 33.04s\n", + "Starting Validation.\n", + "Epochs: 476 Examples seen:45696000 Validation Accuracy: 0.8258 Validation Error: 0.4543 Validation Loss: 0.5647 Total time: 919.89min\n", + "Epoch time: 1.6520 minutes. 500 epochs: 13.7671 hours.\n", + "Epochs: 476. Working time: 15.33 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy went to the park with her mom. they saw a big s.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "45728000 Examples seen. Accuracy: 0.8730 Error: 0.29705 Loss: 0.35885 Threads: 8 Forward time: 3.26s Backward time: 3.83s Step time: 33.79s\n", + "45760000 Examples seen. Accuracy: 0.8711 Error: 0.41235 Loss: 0.45937 Threads: 8 Forward time: 3.09s Backward time: 3.44s Step time: 33.57s\n", + "45792000 Examples seen. Accuracy: 0.8598 Error: 0.34102 Loss: 0.39622 Threads: 8 Forward time: 3.33s Backward time: 3.86s Step time: 33.19s\n", + "Starting Validation.\n", + "Epochs: 477 Examples seen:45792000 Validation Accuracy: 0.8235 Validation Error: 0.4474 Validation Loss: 0.5723 Total time: 921.61min\n", + "Epoch time: 1.6595 minutes. 500 epochs: 13.8292 hours.\n", + "Epochs: 477. Working time: 15.36 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "45824000 Examples seen. 
Accuracy: 0.8537 Error: 0.37845 Loss: 0.47390 Threads: 8 Forward time: 3.13s Backward time: 3.67s Step time: 33.72s\n", + "45856000 Examples seen. Accuracy: 0.8628 Error: 0.33306 Loss: 0.34925 Threads: 8 Forward time: 3.20s Backward time: 3.73s Step time: 33.37s\n", + "45888000 Examples seen. Accuracy: 0.8750 Error: 0.40105 Loss: 0.50767 Threads: 8 Forward time: 3.07s Backward time: 3.39s Step time: 34.04s\n", + "Starting Validation.\n", + "Epochs: 478 Examples seen:45888000 Validation Accuracy: 0.8228 Validation Error: 0.4376 Validation Loss: 0.5855 Total time: 923.34min\n", + "Epoch time: 1.7022 minutes. 500 epochs: 14.1850 hours.\n", + "Epochs: 478. Working time: 15.39 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. the sun was shin.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "45920000 Examples seen. Accuracy: 0.8662 Error: 0.37274 Loss: 0.43486 Threads: 8 Forward time: 3.18s Backward time: 3.59s Step time: 33.99s\n", + "45952000 Examples seen. Accuracy: 0.8558 Error: 0.41261 Loss: 0.46927 Threads: 8 Forward time: 3.02s Backward time: 3.32s Step time: 33.77s\n", + "45984000 Examples seen. Accuracy: 0.8521 Error: 0.40218 Loss: 0.45996 Threads: 8 Forward time: 3.05s Backward time: 3.41s Step time: 33.33s\n", + "Starting Validation.\n", + "Epochs: 479 Examples seen:45984000 Validation Accuracy: 0.8239 Validation Error: 0.4493 Validation Loss: 0.5705 Total time: 925.07min\n", + "Epoch time: 1.6666 minutes. 500 epochs: 13.8883 hours.\n", + "Epochs: 479. Working time: 15.42 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. 
she loved to play with her .\n", + "Max prediction pos is: 81\n", + "46016000 Examples seen. Accuracy: 0.8498 Error: 0.43576 Loss: 0.54646 Threads: 8 Forward time: 3.37s Backward time: 3.94s Step time: 33.29s\n", + "46048000 Examples seen. Accuracy: 0.8523 Error: 0.39925 Loss: 0.45658 Threads: 8 Forward time: 3.34s Backward time: 3.81s Step time: 33.67s\n", + "46080000 Examples seen. Accuracy: 0.8524 Error: 0.42947 Loss: 0.53754 Threads: 8 Forward time: 3.08s Backward time: 3.41s Step time: 33.39s\n", + "Starting Validation.\n", + "Epochs: 480 Examples seen:46080000 Validation Accuracy: 0.8278 Validation Error: 0.4468 Validation Loss: 0.5656 Total time: 926.78min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.971 Min Weight: -1.007 Max Output: 0.749 Min Output: -0.765 TNNetPointwiseConv 81,1,32 Times: 0.18s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.749 Min Output: -0.765 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.730 Min Weight: -0.739 Max Output: 4.499 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.13s 0.05s Parent:2\n", + "Layer 4 Max Output: 4.499 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.04s 0.01s Parent:3\n", + "Layer 5 Max Output: 4.499 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.395 Min Weight: -0.388 Max Output: 4.812 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.37s 0.25s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.244 Min Weight: -0.249 Max Output: 3.545 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.49s 0.34s Parent:6\n", + "Layer 8 Max Output: 3.545 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.05s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.142 Min Weight: -0.202 Max Output: 2.234 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.70s 0.32s Parent:8\n", + "Layer 10 Neurons:128 Max 
Weight: 0.260 Min Weight: -0.213 Max Output: 3.737 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.088 Min Weight: -0.628 Max Output: 15.003 Min Output: -3.941 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.937 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Starting Testing.\n", + "Epochs: 480 Examples seen:46080000 Test Accuracy: 0.8278 Test Error: 0.4468 Test Loss: 0.5656 Total time: 926.82min\n", + "Epoch time: 1.6694 minutes. 500 epochs: 13.9117 hours.\n", + "Epochs: 480. Working time: 15.45 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "46112000 Examples seen. Accuracy: 0.8525 Error: 0.37365 Loss: 0.43939 Threads: 8 Forward time: 3.15s Backward time: 3.61s Step time: 33.65s\n", + "46144000 Examples seen. Accuracy: 0.8521 Error: 0.35033 Loss: 0.40458 Threads: 8 Forward time: 3.08s Backward time: 3.53s Step time: 33.13s\n", + "46176000 Examples seen. Accuracy: 0.8514 Error: 0.31568 Loss: 0.38515 Threads: 8 Forward time: 3.09s Backward time: 3.47s Step time: 33.19s\n", + "Starting Validation.\n", + "Epochs: 481 Examples seen:46176000 Validation Accuracy: 0.8274 Validation Error: 0.4515 Validation Loss: 0.5662 Total time: 928.52min\n", + "Epoch time: 1.6595 minutes. 500 epochs: 13.8292 hours.\n", + "Epochs: 481. Working time: 15.48 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "46208000 Examples seen. Accuracy: 0.8504 Error: 0.37410 Loss: 0.43770 Threads: 8 Forward time: 3.18s Backward time: 3.70s Step time: 33.32s\n", + "46240000 Examples seen. Accuracy: 0.8476 Error: 0.41385 Loss: 0.47308 Threads: 8 Forward time: 3.09s Backward time: 3.45s Step time: 33.79s\n", + "46272000 Examples seen. Accuracy: 0.8484 Error: 0.38589 Loss: 0.41755 Threads: 8 Forward time: 3.31s Backward time: 3.83s Step time: 33.11s\n", + "Starting Validation.\n", + "Epochs: 482 Examples seen:46272000 Validation Accuracy: 0.8280 Validation Error: 0.4452 Validation Loss: 0.5626 Total time: 930.24min\n", + "Epoch time: 1.6554 minutes. 500 epochs: 13.7946 hours.\n", + "Epochs: 482. Working time: 15.50 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. the sun was shin.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "46304000 Examples seen. Accuracy: 0.8632 Error: 0.30500 Loss: 0.31981 Threads: 8 Forward time: 3.53s Backward time: 4.24s Step time: 33.25s\n", + "46336000 Examples seen. Accuracy: 0.8581 Error: 0.41911 Loss: 0.50608 Threads: 8 Forward time: 3.15s Backward time: 3.67s Step time: 34.09s\n", + "46368000 Examples seen. Accuracy: 0.8535 Error: 0.34489 Loss: 0.37343 Threads: 8 Forward time: 3.08s Backward time: 3.45s Step time: 33.78s\n", + "Starting Validation.\n", + "Epochs: 483 Examples seen:46368000 Validation Accuracy: 0.8276 Validation Error: 0.4503 Validation Loss: 0.5673 Total time: 931.96min\n", + "Epoch time: 1.6891 minutes. 500 epochs: 14.0762 hours.\n", + "Epochs: 483. Working time: 15.53 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. 
they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "46400000 Examples seen. Accuracy: 0.8676 Error: 0.41422 Loss: 0.55199 Threads: 8 Forward time: 3.18s Backward time: 3.57s Step time: 33.84s\n", + "46432000 Examples seen. Accuracy: 0.8566 Error: 0.36619 Loss: 0.42878 Threads: 8 Forward time: 3.11s Backward time: 3.57s Step time: 33.06s\n", + "46464000 Examples seen. Accuracy: 0.8513 Error: 0.46404 Loss: 0.60213 Threads: 8 Forward time: 3.30s Backward time: 3.65s Step time: 33.27s\n", + "Starting Validation.\n", + "Epochs: 484 Examples seen:46464000 Validation Accuracy: 0.8247 Validation Error: 0.4534 Validation Loss: 0.5804 Total time: 933.68min\n", + "Epoch time: 1.6637 minutes. 500 epochs: 13.8646 hours.\n", + "Epochs: 484. Working time: 15.56 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little girl named lily went to the park with her mommy. they walked a .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "46496000 Examples seen. Accuracy: 0.8479 Error: 0.43600 Loss: 0.52449 Threads: 8 Forward time: 3.28s Backward time: 3.70s Step time: 33.54s\n", + "46528000 Examples seen. Accuracy: 0.8529 Error: 0.39545 Loss: 0.48403 Threads: 8 Forward time: 3.29s Backward time: 3.76s Step time: 33.07s\n", + "46560000 Examples seen. Accuracy: 0.8526 Error: 0.34997 Loss: 0.41737 Threads: 8 Forward time: 3.12s Backward time: 3.36s Step time: 33.67s\n", + "Starting Validation.\n", + "Epochs: 485 Examples seen:46560000 Validation Accuracy: 0.8280 Validation Error: 0.4493 Validation Loss: 0.5672 Total time: 935.39min\n", + "Epoch time: 1.6834 minutes. 500 epochs: 14.0283 hours.\n", + "Epochs: 485. Working time: 15.59 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "46592000 Examples seen. Accuracy: 0.8503 Error: 0.40737 Loss: 0.53868 Threads: 8 Forward time: 3.03s Backward time: 3.35s Step time: 32.98s\n", + "46624000 Examples seen. Accuracy: 0.8484 Error: 0.41195 Loss: 0.48230 Threads: 8 Forward time: 3.24s Backward time: 3.44s Step time: 33.60s\n", + "46656000 Examples seen. Accuracy: 0.8475 Error: 0.40981 Loss: 0.54541 Threads: 8 Forward time: 3.38s Backward time: 3.96s Step time: 33.09s\n", + "Starting Validation.\n", + "Epochs: 486 Examples seen:46656000 Validation Accuracy: 0.8253 Validation Error: 0.4520 Validation Loss: 0.5700 Total time: 937.09min\n", + "Epoch time: 1.6547 minutes. 500 epochs: 13.7892 hours.\n", + "Epochs: 486. Working time: 15.62 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy went to the park. she saw a big block with her .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "46688000 Examples seen. Accuracy: 0.8485 Error: 0.42796 Loss: 0.53532 Threads: 8 Forward time: 3.19s Backward time: 3.87s Step time: 33.36s\n", + "46720000 Examples seen. Accuracy: 0.8522 Error: 0.39224 Loss: 0.44774 Threads: 8 Forward time: 3.15s Backward time: 3.67s Step time: 33.39s\n", + "46752000 Examples seen. Accuracy: 0.8518 Error: 0.37097 Loss: 0.43066 Threads: 8 Forward time: 3.36s Backward time: 3.95s Step time: 33.33s\n", + "Starting Validation.\n", + "Epochs: 487 Examples seen:46752000 Validation Accuracy: 0.8230 Validation Error: 0.4515 Validation Loss: 0.5741 Total time: 938.81min\n", + "Epoch time: 1.6667 minutes. 500 epochs: 13.8892 hours.\n", + "Epochs: 487. 
Working time: 15.65 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. they wanted to go .\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "46784000 Examples seen. Accuracy: 0.8719 Error: 0.31649 Loss: 0.33551 Threads: 8 Forward time: 3.23s Backward time: 3.83s Step time: 34.60s\n", + "46816000 Examples seen. Accuracy: 0.8810 Error: 0.35668 Loss: 0.38104 Threads: 8 Forward time: 3.23s Backward time: 3.83s Step time: 34.96s\n", + "46848000 Examples seen. Accuracy: 0.8653 Error: 0.40052 Loss: 0.49620 Threads: 8 Forward time: 3.36s Backward time: 4.01s Step time: 34.92s\n", + "Starting Validation.\n", + "Epochs: 488 Examples seen:46848000 Validation Accuracy: 0.8237 Validation Error: 0.4503 Validation Loss: 0.5699 Total time: 940.59min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.972 Min Weight: -1.011 Max Output: 0.750 Min Output: -0.766 TNNetPointwiseConv 81,1,32 Times: 0.17s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.750 Min Output: -0.766 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.732 Min Weight: -0.736 Max Output: 4.501 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.14s 0.08s Parent:2\n", + "Layer 4 Max Output: 4.501 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.01s Parent:3\n", + "Layer 5 Max Output: 4.501 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.392 Min Weight: -0.391 Max Output: 4.841 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.40s 0.29s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.244 Min Weight: -0.249 Max Output: 3.832 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.57s 0.43s Parent:6\n", + "Layer 8 Max 
Output: 3.832 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.07s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.143 Min Weight: -0.208 Max Output: 2.231 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.73s 0.36s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.260 Min Weight: -0.211 Max Output: 4.020 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.04s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.090 Min Weight: -0.630 Max Output: 16.847 Min Output: -4.225 TNNetFullConnectLinear 128,1,1 Times: 0.04s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.979 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Epoch time: 1.7461 minutes. 500 epochs: 14.5508 hours.\n", + "Epochs: 488. Working time: 15.68 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "46880000 Examples seen. Accuracy: 0.8566 Error: 0.38351 Loss: 0.48452 Threads: 8 Forward time: 3.28s Backward time: 3.67s Step time: 33.84s\n", + "46912000 Examples seen. Accuracy: 0.8709 Error: 0.29327 Loss: 0.29602 Threads: 8 Forward time: 3.22s Backward time: 3.86s Step time: 33.89s\n", + "46944000 Examples seen. Accuracy: 0.8805 Error: 0.34852 Loss: 0.42410 Threads: 8 Forward time: 3.60s Backward time: 4.30s Step time: 35.38s\n", + "Starting Validation.\n", + "Epochs: 489 Examples seen:46944000 Validation Accuracy: 0.8201 Validation Error: 0.4420 Validation Loss: 0.6007 Total time: 942.36min\n", + "Epoch time: 1.7690 minutes. 500 epochs: 14.7417 hours.\n", + "Epochs: 489. Working time: 15.71 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "46976000 Examples seen. Accuracy: 0.8867 Error: 0.30864 Loss: 0.32842 Threads: 8 Forward time: 3.56s Backward time: 4.01s Step time: 34.24s\n", + "47008000 Examples seen. Accuracy: 0.8677 Error: 0.37327 Loss: 0.43529 Threads: 8 Forward time: 3.20s Backward time: 3.61s Step time: 33.88s\n", + "47040000 Examples seen. Accuracy: 0.8577 Error: 0.42853 Loss: 0.55954 Threads: 8 Forward time: 3.31s Backward time: 3.77s Step time: 33.59s\n", + "Starting Validation.\n", + "VALIDATION RECORD! Saving NN at autosave.nn\n", + "Epochs: 490 Examples seen:47040000 Validation Accuracy: 0.8285 Validation Error: 0.4476 Validation Loss: 0.5652 Total time: 944.14min\n", + "Starting Testing.\n", + "Epochs: 490 Examples seen:47040000 Test Accuracy: 0.8285 Test Error: 0.4476 Test Loss: 0.5652 Total time: 944.17min\n", + "Epoch time: 1.6797 minutes. 500 epochs: 13.9975 hours.\n", + "Epochs: 490. Working time: 15.74 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily was playing in her garden. she saw a big car wi.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "47072000 Examples seen. Accuracy: 0.8537 Error: 0.35780 Loss: 0.41942 Threads: 8 Forward time: 3.08s Backward time: 3.34s Step time: 33.67s\n", + "47104000 Examples seen. Accuracy: 0.8733 Error: 0.30452 Loss: 0.29377 Threads: 8 Forward time: 3.11s Backward time: 3.63s Step time: 34.22s\n", + "47136000 Examples seen. 
Accuracy: 0.8801 Error: 0.32125 Loss: 0.31951 Threads: 8 Forward time: 3.07s Backward time: 3.56s Step time: 33.80s\n", + "Starting Validation.\n", + "Epochs: 491 Examples seen:47136000 Validation Accuracy: 0.8266 Validation Error: 0.4370 Validation Loss: 0.5770 Total time: 945.91min\n", + "Epoch time: 1.6901 minutes. 500 epochs: 14.0842 hours.\n", + "Epochs: 491. Working time: 15.77 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy went to the park with her mom. the sky and want.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "47168000 Examples seen. Accuracy: 0.8760 Error: 0.41741 Loss: 0.50088 Threads: 8 Forward time: 3.19s Backward time: 3.63s Step time: 33.89s\n", + "47200000 Examples seen. Accuracy: 0.8639 Error: 0.33372 Loss: 0.33754 Threads: 8 Forward time: 2.94s Backward time: 3.29s Step time: 33.37s\n", + "47232000 Examples seen. Accuracy: 0.8680 Error: 0.30858 Loss: 0.28759 Threads: 8 Forward time: 3.22s Backward time: 3.76s Step time: 33.72s\n", + "Starting Validation.\n", + "Epochs: 492 Examples seen:47232000 Validation Accuracy: 0.8253 Validation Error: 0.4299 Validation Loss: 0.5753 Total time: 947.64min\n", + "Epoch time: 1.6860 minutes. 500 epochs: 14.0500 hours.\n", + "Epochs: 492. Working time: 15.79 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "47264000 Examples seen. Accuracy: 0.8675 Error: 0.41148 Loss: 0.48585 Threads: 8 Forward time: 3.28s Backward time: 3.72s Step time: 34.28s\n", + "47296000 Examples seen. 
Accuracy: 0.8547 Error: 0.37414 Loss: 0.44306 Threads: 8 Forward time: 3.05s Backward time: 3.45s Step time: 33.30s\n", + "47328000 Examples seen. Accuracy: 0.8530 Error: 0.38659 Loss: 0.43903 Threads: 8 Forward time: 3.50s Backward time: 4.09s Step time: 33.16s\n", + "Starting Validation.\n", + "Epochs: 493 Examples seen:47328000 Validation Accuracy: 0.8258 Validation Error: 0.4492 Validation Loss: 0.5802 Total time: 949.36min\n", + "Epoch time: 1.6582 minutes. 500 epochs: 13.8179 hours.\n", + "Epochs: 493. Working time: 15.82 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "47360000 Examples seen. Accuracy: 0.8826 Error: 0.36416 Loss: 0.41191 Threads: 8 Forward time: 3.19s Backward time: 3.81s Step time: 34.24s\n", + "47392000 Examples seen. Accuracy: 0.8613 Error: 0.35558 Loss: 0.43178 Threads: 8 Forward time: 3.14s Backward time: 3.61s Step time: 35.22s\n", + "47424000 Examples seen. Accuracy: 0.8593 Error: 0.40838 Loss: 0.48130 Threads: 8 Forward time: 3.39s Backward time: 3.81s Step time: 33.17s\n", + "Starting Validation.\n", + "Epochs: 494 Examples seen:47424000 Validation Accuracy: 0.8235 Validation Error: 0.4482 Validation Loss: 0.5730 Total time: 951.12min\n", + "Epoch time: 1.6584 minutes. 500 epochs: 13.8200 hours.\n", + "Epochs: 494. Working time: 15.85 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside a.\n", + "Max prediction pos is: 81\n", + "47456000 Examples seen. 
Accuracy: 0.8555 Error: 0.30599 Loss: 0.30133 Threads: 8 Forward time: 3.27s Backward time: 3.76s Step time: 33.42s\n", + "47488000 Examples seen. Accuracy: 0.8548 Error: 0.39322 Loss: 0.47173 Threads: 8 Forward time: 3.66s Backward time: 4.09s Step time: 33.29s\n", + "47520000 Examples seen. Accuracy: 0.8631 Error: 0.30944 Loss: 0.29760 Threads: 8 Forward time: 3.16s Backward time: 3.52s Step time: 33.36s\n", + "Starting Validation.\n", + "Epochs: 495 Examples seen:47520000 Validation Accuracy: 0.8228 Validation Error: 0.4339 Validation Loss: 0.5732 Total time: 952.83min\n", + "Epoch time: 1.6678 minutes. 500 epochs: 13.8979 hours.\n", + "Epochs: 495. Working time: 15.88 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. tim saw a big tree.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "47552000 Examples seen. Accuracy: 0.8640 Error: 0.42493 Loss: 0.53032 Threads: 8 Forward time: 3.25s Backward time: 3.62s Step time: 34.13s\n", + "47584000 Examples seen. Accuracy: 0.8529 Error: 0.39271 Loss: 0.50408 Threads: 8 Forward time: 3.25s Backward time: 3.70s Step time: 32.85s\n", + "47616000 Examples seen. 
Accuracy: 0.8517 Error: 0.38127 Loss: 0.42391 Threads: 8 Forward time: 3.17s Backward time: 3.54s Step time: 33.36s\n", + "Starting Validation.\n", + "Epochs: 496 Examples seen:47616000 Validation Accuracy: 0.8226 Validation Error: 0.4526 Validation Loss: 0.5687 Total time: 954.54min\n", + "Layer 0 Max Output: 1.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.971 Min Weight: -1.011 Max Output: 0.749 Min Output: -0.766 TNNetPointwiseConv 81,1,32 Times: 0.17s 0.09s Parent:0\n", + "Layer 2 Max Output: 0.749 Min Output: -0.766 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.741 Min Weight: -0.736 Max Output: 4.549 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.15s 0.07s Parent:2\n", + "Layer 4 Max Output: 4.549 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.03s 0.00s Parent:3\n", + "Layer 5 Max Output: 4.549 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max Weight: 0.401 Min Weight: -0.392 Max Output: 4.514 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.43s 0.24s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.246 Min Weight: -0.250 Max Output: 3.825 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 1.41s 0.37s Parent:6\n", + "Layer 8 Max Output: 3.825 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.06s 0.01s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.142 Min Weight: -0.207 Max Output: 2.233 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.70s 0.34s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.261 Min Weight: -0.213 Max Output: 3.761 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.05s 0.03s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.090 Min Weight: -0.633 Max Output: 16.636 Min Output: -4.136 TNNetFullConnectLinear 128,1,1 Times: 0.03s 0.01s Parent:10\n", + "Layer 12 Max Output: 0.990 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s 
Parent:11\n", + "Epoch time: 1.6679 minutes. 500 epochs: 13.8988 hours.\n", + "Epochs: 496. Working time: 15.91 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "47648000 Examples seen. Accuracy: 0.8679 Error: 0.37684 Loss: 0.45146 Threads: 8 Forward time: 3.14s Backward time: 3.66s Step time: 33.76s\n", + "47680000 Examples seen. Accuracy: 0.8741 Error: 0.35398 Loss: 0.40003 Threads: 8 Forward time: 3.40s Backward time: 3.94s Step time: 34.10s\n", + "47712000 Examples seen. Accuracy: 0.8643 Error: 0.40772 Loss: 0.47312 Threads: 8 Forward time: 3.31s Backward time: 3.79s Step time: 34.25s\n", + "Starting Validation.\n", + "Epochs: 497 Examples seen:47712000 Validation Accuracy: 0.8237 Validation Error: 0.4519 Validation Loss: 0.5680 Total time: 956.29min\n", + "Epoch time: 1.7124 minutes. 500 epochs: 14.2700 hours.\n", + "Epochs: 497. Working time: 15.94 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little boy named tim went to the park with his mom. they saw a big tre.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "47744000 Examples seen. Accuracy: 0.8528 Error: 0.41563 Loss: 0.47307 Threads: 8 Forward time: 3.30s Backward time: 3.66s Step time: 33.45s\n", + "47776000 Examples seen. Accuracy: 0.8506 Error: 0.40070 Loss: 0.47173 Threads: 8 Forward time: 3.13s Backward time: 3.50s Step time: 33.53s\n", + "47808000 Examples seen. 
Accuracy: 0.8502 Error: 0.41931 Loss: 0.50579 Threads: 8 Forward time: 2.96s Backward time: 3.20s Step time: 32.30s\n", + "Starting Validation.\n", + "Epochs: 498 Examples seen:47808000 Validation Accuracy: 0.8278 Validation Error: 0.4492 Validation Loss: 0.5637 Total time: 957.98min\n", + "Epoch time: 1.6150 minutes. 500 epochs: 13.4583 hours.\n", + "Epochs: 498. Working time: 15.97 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "one day, a little boy named tim went to the park with his mom. tim was very excit.\n", + "once upon a time, there was a little girl named lily. she loved to play with her .\n", + "Max prediction pos is: 81\n", + "47840000 Examples seen. Accuracy: 0.8510 Error: 0.41134 Loss: 0.44927 Threads: 8 Forward time: 3.07s Backward time: 3.39s Step time: 33.02s\n", + "47872000 Examples seen. Accuracy: 0.8525 Error: 0.41538 Loss: 0.45658 Threads: 8 Forward time: 3.08s Backward time: 3.32s Step time: 31.68s\n", + "47904000 Examples seen. Accuracy: 0.8659 Error: 0.42764 Loss: 0.52379 Threads: 8 Forward time: 3.05s Backward time: 3.18s Step time: 31.10s\n", + "Starting Validation.\n", + "Epochs: 499 Examples seen:47904000 Validation Accuracy: 0.8283 Validation Error: 0.4524 Validation Loss: 0.5662 Total time: 959.62min\n", + "Epoch time: 1.5551 minutes. 500 epochs: 12.9596 hours.\n", + "Epochs: 499. Working time: 15.99 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily went to the park with her mom. they saw a big t.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "47936000 Examples seen. Accuracy: 0.8557 Error: 0.39071 Loss: 0.45582 Threads: 8 Forward time: 2.91s Backward time: 3.19s Step time: 29.88s\n", + "47968000 Examples seen. 
Accuracy: 0.8534 Error: 0.43360 Loss: 0.50318 Threads: 8 Forward time: 2.89s Backward time: 2.99s Step time: 29.88s\n", + "48000000 Examples seen. Accuracy: 0.8555 Error: 0.43003 Loss: 0.50325 Threads: 8 Forward time: 2.79s Backward time: 2.73s Step time: 28.45s\n", + "Starting Validation.\n", + "Epochs: 500 Examples seen:48000000 Validation Accuracy: 0.8266 Validation Error: 0.4480 Validation Loss: 0.5683 Total time: 961.13min\n", + "Starting Testing.\n", + "Epochs: 500 Examples seen:48000000 Test Accuracy: 0.8266 Test Error: 0.4480 Test Loss: 0.5683 Total time: 961.16min\n", + "Epoch time: 1.4227 minutes. 500 epochs: 11.8554 hours.\n", + "Epochs: 500. Working time: 16.02 hours.\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lucy was walking in the park. she saw a big bowl on .\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "Max prediction pos is: 81\n", + "Loading autosave.nn for final test.\n", + "Starting Testing.\n", + "Epochs: 500 Examples seen:48000000 Test Accuracy: 0.8285 Test Error: 0.4476 Test Loss: 0.5652 Total time: 961.22min\n", + "Loading best performing results autosave.nn.\n", + "Finished.\n", + "Layer 0 Max Output: 0.000 Min Output: 0.000 TNNetInput 81,1,128 Times: 0.00s 0.00s\n", + "Layer 1 Neurons: 32 Max Weight: 0.972 Min Weight: -1.013 Max Output: 0.000 Min Output: 0.000 TNNetPointwiseConv 81,1,32 Times: 0.00s 0.00s Parent:0\n", + "Layer 2 Max Output: 0.000 Min Output: 0.000 TNNetPadXY 83,1,32 Times: 0.00s 0.00s Parent:1\n", + "Layer 3 Neurons: 64 Max Weight: 0.737 Min Weight: -0.735 Max Output: 0.000 Min Output: 0.000 TNNetConvolutionReLU 81,1,64 Times: 0.00s 0.00s Parent:2\n", + "Layer 4 Max Output: 0.000 Min Output: 0.000 TNNetMaxPool 27,1,64 Times: 0.00s 0.00s Parent:3\n", + "Layer 5 Max Output: 0.000 Min Output: 0.000 TNNetPadXY 29,1,64 Times: 0.00s 0.00s Parent:4\n", + "Layer 6 Neurons:384 Max 
Weight: 0.398 Min Weight: -0.392 Max Output: 0.000 Min Output: 0.000 TNNetConvolutionReLU 27,1,384 Times: 0.00s 0.00s Parent:5\n", + "Layer 7 Neurons:1024 Max Weight: 0.245 Min Weight: -0.249 Max Output: 0.000 Min Output: 0.000 TNNetPointwiseConvReLU 27,1,1024 Times: 0.00s 0.00s Parent:6\n", + "Layer 8 Max Output: 0.000 Min Output: 0.000 TNNetMaxPoolWithPosition 1,1,2048 Times: 0.00s 0.00s Parent:7\n", + "Layer 9 Neurons:1024 Max Weight: 0.143 Min Weight: -0.208 Max Output: 0.000 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,1024 Times: 0.00s 0.00s Parent:8\n", + "Layer 10 Neurons:128 Max Weight: 0.261 Min Weight: -0.212 Max Output: 0.000 Min Output: 0.000 TNNetPointwiseConvReLU 1,1,128 Times: 0.00s 0.00s Parent:9\n", + "Layer 11 Neurons:128 Max Weight: 1.092 Min Weight: -0.630 Max Output: 0.000 Min Output: 0.000 TNNetFullConnectLinear 128,1,1 Times: 0.00s 0.00s Parent:10\n", + "Layer 12 Max Output: 0.000 Min Output: 0.000 TNNetSoftMax 128,1,1 Times: 0.00s 0.00s Parent:11\n", + "Testing.\n", + "once upon a time, there was a little girl named lily. she loved to play outside i.\n", + "one day, a little girl named lily was playing in her garden. she saw a big car wi.\n", + "once upon a time, there was a little girl named lily. 
she loved to play outside i.\n", + "Max prediction pos is: 81\n" + ] + } + ], + "source": [ + "!neural-api/bin/x86_64-linux/bin/CaiOptimizedDenseNet" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "wSnDzsDfBHBh", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "a361fa0d-b5a4-4f24-f6de-bd37b1251516" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "epoch,training accuracy,training loss,training error,validation accuracy,validation loss,validation error,learning rate,time,test accuracy,test loss,test error\n", + "1,0.7344,0.6005,0.4323,0.2026,10.7440,1.6365,0.0001000,95,,,\n", + "2,0.8830,0.4258,0.3229,0.1271,9.7572,1.7555,0.0001000,187,,,\n", + "3,0.8523,0.3943,0.3334,0.1125,8.8159,1.7831,0.0001000,280,,,\n", + "4,0.8470,0.6063,0.4446,0.1098,11.2453,1.7933,0.0001000,375,,,\n", + "5,0.8424,0.5759,0.5095,0.1048,11.0278,1.8023,0.0001000,469,,,\n", + "6,0.8712,0.4172,0.3530,0.1144,11.0418,1.7850,0.0001000,565,,,\n", + "7,0.8884,0.3228,0.2597,0.1279,10.1913,1.7566,0.0001000,663,,,\n", + "8,0.8909,0.4237,0.3561,0.1379,9.6974,1.7370,0.0001000,767,,,\n", + "9,0.8898,0.3702,0.2837,0.1686,9.8982,1.6769,0.0001000,877,,,\n", + "10,0.8921,0.3697,0.3066,0.1987,8.9174,1.6257,0.0001000,993,0.1987,8.9174,1.6257\n", + "11,0.8946,0.3656,0.2824,0.2154,9.7150,1.5984,0.0001000,1103,,,\n", + "12,0.8854,0.4107,0.3113,0.2204,8.9187,1.5939,0.0001000,1216,,,\n", + "13,0.8849,0.5218,0.3871,0.2325,7.9209,1.5727,0.0001000,1331,,,\n", + "14,0.8870,0.3961,0.3083,0.2535,7.2556,1.5318,0.0001000,1459,,,\n", + "15,0.8823,0.4129,0.3278,0.2611,6.1908,1.5066,0.0001000,1589,,,\n", + "16,0.8762,0.5493,0.3913,0.2813,5.2988,1.4713,0.0001000,1719,,,\n", + "17,0.8692,0.3981,0.3283,0.3063,4.2153,1.4500,0.0001000,1850,,,\n", + "18,0.8666,0.4712,0.3816,0.3414,3.8292,1.3953,0.0001000,1982,,,\n", + "19,0.8647,0.4679,0.3489,0.3454,3.4678,1.3888,0.0001000,2115,,,\n", + 
"20,0.8660,0.4640,0.3529,0.3464,3.5444,1.3914,0.0001000,2250,0.3464,3.5444,1.3914\n", + "21,0.8675,0.4197,0.3739,0.3829,2.8933,1.3181,0.0001000,2382,,,\n", + "22,0.8702,0.4401,0.3370,0.4149,2.8237,1.2905,0.0001000,2512,,,\n", + "23,0.8686,0.6697,0.4280,0.4629,2.2027,1.2015,0.0001000,2643,,,\n", + "24,0.8689,0.4349,0.3598,0.4601,2.3262,1.2055,0.0001000,2775,,,\n", + "25,0.8684,0.6115,0.4350,0.4380,2.5220,1.2224,0.0001000,2904,,,\n", + "26,0.8712,0.3964,0.3350,0.4900,2.0931,1.1423,0.0001000,3035,,,\n", + "27,0.8700,0.5493,0.4125,0.5294,1.8396,1.0854,0.0001000,3166,,,\n", + "28,0.8676,0.5710,0.3909,0.5557,1.7513,1.0354,0.0001000,3300,,,\n", + "29,0.8619,0.5197,0.4077,0.5563,1.7343,1.0320,0.0001000,3432,,,\n", + "30,0.8528,0.5934,0.4121,0.5540,1.7218,1.0452,0.0001000,3563,0.5540,1.7218,1.0452\n", + "31,0.8519,0.5256,0.3950,0.6023,1.5543,0.9582,0.0001000,3695,,,\n", + "32,0.8471,0.4645,0.4061,0.5993,1.5214,0.9663,0.0001000,3822,,,\n", + "33,0.8432,0.5153,0.4055,0.6100,1.4713,0.9436,0.0001000,3955,,,\n", + "34,0.8424,0.5789,0.4403,0.6210,1.4211,0.9345,0.0001000,4087,,,\n", + "35,0.8405,0.5160,0.4235,0.6162,1.4266,0.9438,0.0001000,4221,,,\n", + "36,0.8353,0.5602,0.4565,0.6352,1.3608,0.9056,0.0001000,4356,,,\n", + "37,0.8306,0.6577,0.4850,0.6313,1.3697,0.8963,0.0001000,4487,,,\n", + "38,0.8295,0.5582,0.4141,0.6444,1.2987,0.8749,0.0001000,4619,,,\n", + "39,0.8258,0.6955,0.5360,0.6488,1.2835,0.8795,0.0001000,4745,,,\n", + "40,0.8236,0.7067,0.4818,0.6415,1.3152,0.8658,0.0001000,4875,0.6415,1.3152,0.8658\n", + "41,0.8205,0.7678,0.5341,0.6390,1.2893,0.8887,0.0001000,5002,,,\n", + "42,0.8193,0.7444,0.5635,0.6534,1.2328,0.8720,0.0001000,5134,,,\n", + "43,0.8175,0.6475,0.4662,0.6576,1.2665,0.8338,0.0001000,5268,,,\n", + "44,0.8198,0.5577,0.4241,0.6632,1.2139,0.8319,0.0001000,5403,,,\n", + "45,0.8137,0.6308,0.4876,0.6669,1.1957,0.8295,0.0001000,5539,,,\n", + "46,0.8150,0.6015,0.4937,0.6617,1.2220,0.8366,0.0001000,5672,,,\n", + 
"47,0.8121,0.5275,0.4401,0.6724,1.1679,0.8154,0.0001000,5810,,,\n", + "48,0.8093,0.7768,0.5787,0.6644,1.1827,0.8160,0.0001000,5945,,,\n", + "49,0.8095,0.7238,0.5397,0.6603,1.1844,0.8301,0.0001000,6085,,,\n", + "50,0.8063,0.7126,0.5449,0.6813,1.1259,0.7923,0.0001000,6225,0.6813,1.1259,0.7923\n", + "51,0.8059,0.6619,0.5444,0.6849,1.1214,0.7848,0.0001000,6361,,,\n", + "52,0.8033,0.6397,0.5094,0.6926,1.0918,0.7583,0.0001000,6497,,,\n", + "53,0.8000,0.6784,0.5193,0.6957,1.0715,0.7812,0.0001000,6633,,,\n", + "54,0.8013,0.5895,0.4850,0.6939,1.0812,0.7638,0.0001000,6762,,,\n", + "55,0.7990,0.7292,0.5442,0.7028,1.0515,0.7613,0.0001000,6892,,,\n", + "56,0.7958,0.7559,0.5740,0.6912,1.0632,0.7749,0.0001000,7021,,,\n", + "57,0.7972,0.6589,0.5330,0.7108,1.0264,0.7563,0.0001000,7152,,,\n", + "58,0.7946,0.6622,0.5540,0.7106,1.0134,0.7474,0.0001000,7283,,,\n", + "59,0.7914,0.7055,0.5426,0.7137,1.0205,0.7420,0.0001000,7417,,,\n", + "60,0.7923,0.6458,0.5598,0.7137,1.0244,0.7243,0.0001000,7552,0.7137,1.0244,0.7243\n", + "61,0.7917,0.7051,0.5302,0.7222,0.9810,0.7234,0.0001000,7686,,,\n", + "62,0.7888,0.6698,0.5232,0.7227,0.9820,0.7186,0.0001000,7822,,,\n", + "63,0.7907,0.6970,0.5258,0.7131,0.9923,0.7238,0.0001000,7954,,,\n", + "64,0.7861,0.6093,0.5095,0.7170,0.9853,0.7285,0.0001000,8084,,,\n", + "65,0.7854,0.7936,0.6123,0.7254,0.9555,0.7076,0.0001000,8216,,,\n", + "66,0.7852,0.8385,0.5827,0.7258,0.9624,0.7074,0.0001000,8345,,,\n", + "67,0.7838,0.9154,0.6758,0.7295,0.9429,0.6978,0.0001000,8478,,,\n", + "68,0.7803,0.7949,0.6084,0.7281,0.9419,0.6950,0.0001000,8606,,,\n", + "69,0.7816,0.7514,0.5899,0.7346,0.9393,0.6846,0.0001000,8737,,,\n", + "70,0.7830,0.6518,0.5238,0.7321,0.9285,0.6947,0.0001000,8868,0.7321,0.9285,0.6947\n", + "71,0.7800,0.7065,0.5566,0.7306,0.9163,0.6930,0.0001000,8994,,,\n", + "72,0.7789,0.7818,0.6107,0.7373,0.9122,0.6826,0.0001000,9123,,,\n", + "73,0.7751,0.6792,0.5713,0.7327,0.9344,0.6783,0.0001000,9251,,,\n", + 
"74,0.7763,0.9101,0.7101,0.7364,0.9117,0.6863,0.0001000,9378,,,\n", + "75,0.7710,0.7909,0.6172,0.7435,0.8905,0.6765,0.0001000,9509,,,\n", + "76,0.7716,0.7720,0.6119,0.7421,0.8974,0.6776,0.0001000,9642,,,\n", + "77,0.7735,0.5616,0.5270,0.7406,0.8947,0.6686,0.0001000,9773,,,\n", + "78,0.7695,0.8320,0.6394,0.7437,0.8695,0.6553,0.0001000,9903,,,\n", + "79,0.7710,0.7051,0.5693,0.7462,0.8660,0.6513,0.0001000,10036,,,\n", + "80,0.7680,0.8076,0.6284,0.7473,0.8721,0.6499,0.0001000,10169,0.7473,0.8721,0.6499\n", + "81,0.7713,0.7612,0.6040,0.7431,0.8750,0.6559,0.0001000,10301,,,\n", + "82,0.7722,0.7177,0.5526,0.7513,0.8602,0.6494,0.0001000,10436,,,\n", + "83,0.7748,0.6566,0.5599,0.7481,0.8514,0.6488,0.0001000,10569,,,\n", + "84,0.7765,0.8255,0.6257,0.7502,0.8468,0.6329,0.0001000,10700,,,\n", + "85,0.7778,0.7574,0.5758,0.7621,0.8496,0.6270,0.0001000,10830,,,\n", + "86,0.7798,0.7441,0.5814,0.7550,0.8443,0.6401,0.0001000,10958,,,\n", + "87,0.7781,0.7723,0.6484,0.7573,0.8354,0.6392,0.0001000,11094,,,\n", + "88,0.7814,0.6496,0.5297,0.7561,0.8532,0.6359,0.0001000,11231,,,\n", + "89,0.7811,0.7144,0.5555,0.7556,0.8358,0.6282,0.0001000,11369,,,\n", + "90,0.7792,0.6136,0.5287,0.7606,0.8375,0.6419,0.0001000,11509,0.7606,0.8375,0.6419\n", + "91,0.7826,0.7341,0.5884,0.7525,0.8413,0.6435,0.0001000,11643,,,\n", + "92,0.7837,0.8082,0.6032,0.7583,0.8297,0.6163,0.0001000,11777,,,\n", + "93,0.7876,0.5963,0.5100,0.7625,0.8146,0.6212,0.0001000,11907,,,\n", + "94,0.7871,0.6640,0.5528,0.7661,0.8315,0.6182,0.0001000,12035,,,\n", + "95,0.7854,0.6007,0.5181,0.7590,0.8221,0.6264,0.0001000,12160,,,\n", + "96,0.7864,0.7162,0.5467,0.7638,0.8113,0.6134,0.0001000,12284,,,\n", + "97,0.7914,0.7228,0.5386,0.7629,0.8160,0.6056,0.0001000,12407,,,\n", + "98,0.7892,0.5928,0.5218,0.7652,0.8126,0.6064,0.0001000,12534,,,\n", + "99,0.7892,0.8355,0.6293,0.7671,0.8109,0.6160,0.0001000,12662,,,\n", + "100,0.7905,0.5998,0.5082,0.7615,0.8225,0.6103,0.0001000,12793,0.7615,0.8225,0.6103\n", + 
"101,0.7922,0.7093,0.5527,0.7665,0.7993,0.6139,0.0001000,12921,,,\n", + "102,0.7909,0.7161,0.5384,0.7679,0.8018,0.6102,0.0001000,13047,,,\n", + "103,0.7963,0.6589,0.5512,0.7692,0.8138,0.6034,0.0001000,13176,,,\n", + "104,0.7973,0.6393,0.5592,0.7650,0.8008,0.6021,0.0001000,13303,,,\n", + "105,0.7943,0.7827,0.6205,0.7686,0.8079,0.6036,0.0001000,13430,,,\n", + "106,0.7978,0.7290,0.5518,0.7661,0.7995,0.6000,0.0001000,13556,,,\n", + "107,0.7939,0.6716,0.5333,0.7732,0.7881,0.5959,0.0001000,13683,,,\n", + "108,0.7939,0.6976,0.5407,0.7775,0.7831,0.5944,0.0001000,13808,,,\n", + "109,0.7984,0.7877,0.5843,0.7767,0.7942,0.6039,0.0001000,13930,,,\n", + "110,0.7967,0.7015,0.5505,0.7692,0.8023,0.5978,0.0001000,14051,0.7692,0.8023,0.5978\n", + "111,0.8033,0.6396,0.5344,0.7719,0.7753,0.5972,0.0001000,14173,,,\n", + "112,0.7978,0.7034,0.6037,0.7734,0.7932,0.6040,0.0001000,14293,,,\n", + "113,0.7992,0.6288,0.4558,0.7796,0.7781,0.5804,0.0001000,14418,,,\n", + "114,0.8028,0.6026,0.5205,0.7821,0.7756,0.5893,0.0001000,14548,,,\n", + "115,0.8011,0.6822,0.5462,0.7798,0.7853,0.5826,0.0001000,14670,,,\n", + "116,0.8006,0.7128,0.5788,0.7798,0.7828,0.5787,0.0001000,14793,,,\n", + "117,0.7982,0.7259,0.5842,0.7765,0.7864,0.5822,0.0001000,14914,,,\n", + "118,0.8009,0.6560,0.4766,0.7794,0.7683,0.5758,0.0001000,15037,,,\n", + "119,0.8018,0.5639,0.4527,0.7746,0.7898,0.5754,0.0001000,15160,,,\n", + "120,0.8049,0.5877,0.4731,0.7796,0.7757,0.5789,0.0001000,15285,0.7796,0.7757,0.5789\n", + "121,0.8013,0.7371,0.5385,0.7788,0.7685,0.5805,0.0001000,15406,,,\n", + "122,0.8045,0.6928,0.5578,0.7819,0.7632,0.5777,0.0001000,15526,,,\n", + "123,0.8081,0.6002,0.5290,0.7809,0.7618,0.5739,0.0001000,15647,,,\n", + "124,0.8061,0.7054,0.5501,0.7863,0.7622,0.5674,0.0001000,15769,,,\n", + "125,0.8081,0.5198,0.4534,0.7773,0.7621,0.5689,0.0001000,15892,,,\n", + "126,0.8076,0.5747,0.5086,0.7838,0.7499,0.5757,0.0001000,16012,,,\n", + "127,0.8067,0.6420,0.5486,0.7809,0.7672,0.5780,0.0001000,16133,,,\n", + 
"128,0.8076,0.6338,0.5415,0.7844,0.7625,0.5673,0.0001000,16252,,,\n", + "129,0.8048,0.6932,0.5565,0.7867,0.7597,0.5772,0.0001000,16375,,,\n", + "130,0.8072,0.6360,0.5315,0.7807,0.7572,0.5735,0.0001000,16496,0.7807,0.7572,0.5735\n", + "131,0.8135,0.6060,0.5114,0.7846,0.7446,0.5676,0.0001000,16614,,,\n", + "132,0.8107,0.5508,0.4882,0.7809,0.7591,0.5632,0.0001000,16734,,,\n", + "133,0.8095,0.6150,0.5046,0.7811,0.7540,0.5701,0.0001000,16857,,,\n", + "134,0.8085,0.6398,0.5479,0.7844,0.7504,0.5601,0.0001000,16978,,,\n", + "135,0.8076,0.7431,0.5844,0.7861,0.7429,0.5636,0.0001000,17096,,,\n", + "136,0.8107,0.7549,0.5875,0.7826,0.7444,0.5737,0.0001000,17213,,,\n", + "137,0.8101,0.5490,0.4632,0.7796,0.7430,0.5697,0.0001000,17332,,,\n", + "138,0.8152,0.6539,0.5360,0.7901,0.7375,0.5707,0.0001000,17452,,,\n", + "139,0.8104,0.6478,0.5053,0.7890,0.7366,0.5550,0.0001000,17569,,,\n", + "140,0.8167,0.6529,0.4947,0.7890,0.7513,0.5488,0.0001000,17689,0.7890,0.7513,0.5488\n", + "141,0.8113,0.5448,0.4732,0.7863,0.7489,0.5516,0.0001000,17805,,,\n", + "142,0.8152,0.6644,0.5185,0.7899,0.7277,0.5443,0.0001000,17919,,,\n", + "143,0.8178,0.6001,0.5268,0.7890,0.7359,0.5516,0.0001000,18033,,,\n", + "144,0.8122,0.5263,0.4540,0.7821,0.7302,0.5543,0.0001000,18148,,,\n", + "145,0.8095,0.7126,0.5458,0.7907,0.7330,0.5557,0.0001000,18264,,,\n", + "146,0.8149,0.6060,0.5045,0.7894,0.7270,0.5586,0.0001000,18380,,,\n", + "147,0.8134,0.5639,0.5015,0.7922,0.7246,0.5447,0.0001000,18496,,,\n", + "148,0.8223,0.5645,0.4807,0.7888,0.7365,0.5481,0.0001000,18613,,,\n", + "149,0.8154,0.5412,0.4784,0.7926,0.7226,0.5456,0.0001000,18731,,,\n", + "150,0.8176,0.6311,0.4874,0.7892,0.7128,0.5530,0.0001000,18854,0.7892,0.7128,0.5530\n", + "151,0.8206,0.6487,0.5367,0.7913,0.7134,0.5436,0.0001000,18974,,,\n", + "152,0.8158,0.5808,0.5085,0.7944,0.7252,0.5422,0.0001000,19094,,,\n", + "153,0.8155,0.6353,0.5220,0.7955,0.7125,0.5403,0.0001000,19213,,,\n", + "154,0.8141,0.5835,0.4903,0.7915,0.7244,0.5390,0.0001000,19328,,,\n", + 
"155,0.8155,0.6529,0.4980,0.7880,0.7275,0.5457,0.0001000,19442,,,\n", + "156,0.8175,0.6162,0.5213,0.7988,0.7175,0.5310,0.0001000,19559,,,\n", + "157,0.8194,0.6496,0.4865,0.7967,0.7109,0.5437,0.0001000,19676,,,\n", + "158,0.8181,0.5566,0.4683,0.7919,0.7288,0.5446,0.0001000,19794,,,\n", + "159,0.8188,0.5272,0.4342,0.7917,0.7088,0.5432,0.0001000,19911,,,\n", + "160,0.8185,0.4921,0.4288,0.7972,0.7089,0.5297,0.0001000,20029,0.7972,0.7089,0.5297\n", + "161,0.8184,0.6232,0.5272,0.7926,0.7183,0.5515,0.0001000,20144,,,\n", + "162,0.8208,0.5497,0.4534,0.7936,0.7128,0.5385,0.0001000,20259,,,\n", + "163,0.8177,0.7209,0.5504,0.7955,0.7075,0.5349,0.0001000,20373,,,\n", + "164,0.8172,0.6498,0.5546,0.7955,0.7022,0.5397,0.0001000,20489,,,\n", + "165,0.8189,0.5270,0.4338,0.7976,0.6956,0.5336,0.0001000,20602,,,\n", + "166,0.8182,0.5820,0.4901,0.7957,0.6979,0.5380,0.0001000,20718,,,\n", + "167,0.8250,0.5280,0.4309,0.7967,0.6988,0.5216,0.0001000,20835,,,\n", + "168,0.8203,0.6312,0.5130,0.7951,0.6966,0.5362,0.0001000,20950,,,\n", + "169,0.8210,0.5634,0.4779,0.7959,0.7018,0.5463,0.0001000,21065,,,\n", + "170,0.8175,0.6711,0.5615,0.7990,0.6888,0.5372,0.0001000,21188,0.7990,0.6888,0.5372\n", + "171,0.8196,0.6884,0.5037,0.7970,0.6941,0.5337,0.0001000,21308,,,\n", + "172,0.8211,0.5063,0.4090,0.7995,0.6851,0.5241,0.0001000,21432,,,\n", + "173,0.8234,0.8045,0.6350,0.7997,0.6834,0.5250,0.0001000,21553,,,\n", + "174,0.8231,0.6844,0.5386,0.7974,0.6889,0.5303,0.0001000,21670,,,\n", + "175,0.8262,0.6262,0.4756,0.7972,0.6860,0.5231,0.0001000,21786,,,\n", + "176,0.8264,0.5495,0.4548,0.7986,0.6921,0.5335,0.0001000,21902,,,\n", + "177,0.8261,0.6295,0.5156,0.7995,0.6854,0.5280,0.0001000,22022,,,\n", + "178,0.8261,0.5477,0.4553,0.7976,0.6900,0.5284,0.0001000,22141,,,\n", + "179,0.8237,0.5988,0.4727,0.7955,0.6964,0.5320,0.0001000,22260,,,\n", + "180,0.8310,0.4767,0.4025,0.7953,0.6856,0.5153,0.0001000,22380,0.7953,0.6856,0.5153\n", + "181,0.8215,0.5333,0.4552,0.8036,0.6752,0.5308,0.0001000,22498,,,\n", + 
"182,0.8222,0.6722,0.5057,0.7995,0.6945,0.5217,0.0001000,22613,,,\n", + "183,0.8309,0.4939,0.4069,0.7999,0.6965,0.5167,0.0001000,22727,,,\n", + "184,0.8252,0.5866,0.4709,0.8005,0.6781,0.5217,0.0001000,22844,,,\n", + "185,0.8291,0.6055,0.4549,0.8011,0.6831,0.5164,0.0001000,22970,,,\n", + "186,0.8281,0.6642,0.5184,0.7961,0.6931,0.5223,0.0001000,23094,,,\n", + "187,0.8251,0.5440,0.4554,0.8001,0.6822,0.5308,0.0001000,23221,,,\n", + "188,0.8359,0.5585,0.4848,0.8026,0.6728,0.5170,0.0001000,23346,,,\n", + "189,0.8252,0.5356,0.4748,0.7949,0.6783,0.5281,0.0001000,23467,,,\n", + "190,0.8428,0.3153,0.3176,0.7978,0.6776,0.5093,0.0001000,23588,0.7978,0.6776,0.5093\n", + "191,0.8233,0.6171,0.4608,0.8003,0.6840,0.5241,0.0001000,23707,,,\n", + "192,0.8226,0.5858,0.4761,0.8003,0.6834,0.5204,0.0001000,23823,,,\n", + "193,0.8262,0.4785,0.4299,0.7997,0.6780,0.5133,0.0001000,23939,,,\n", + "194,0.8255,0.6073,0.5005,0.8022,0.6794,0.5304,0.0001000,24057,,,\n", + "195,0.8247,0.6735,0.4892,0.8036,0.6832,0.5112,0.0001000,24178,,,\n", + "196,0.8244,0.5952,0.4584,0.8018,0.6784,0.5260,0.0001000,24298,,,\n", + "197,0.8341,0.6987,0.5292,0.8015,0.6784,0.5046,0.0001000,24417,,,\n", + "198,0.8351,0.4549,0.3953,0.8036,0.6882,0.5168,0.0001000,24534,,,\n", + "199,0.8275,0.5954,0.4897,0.8026,0.6767,0.5210,0.0001000,24652,,,\n", + "200,0.8249,0.6425,0.5010,0.8040,0.6614,0.5133,0.0001000,24774,0.8040,0.6614,0.5133\n", + "201,0.8264,0.5343,0.4451,0.7992,0.6818,0.5112,0.0001000,24888,,,\n", + "202,0.8295,0.5845,0.4727,0.8020,0.6715,0.5205,0.0001000,25002,,,\n", + "203,0.8288,0.6379,0.4795,0.8040,0.6656,0.5214,0.0001000,25116,,,\n", + "204,0.8320,0.4398,0.4114,0.8003,0.6668,0.4981,0.0001000,25233,,,\n", + "205,0.8340,0.4568,0.4285,0.8020,0.6682,0.5095,0.0001000,25355,,,\n", + "206,0.8320,0.5378,0.4612,0.8026,0.6586,0.5104,0.0001000,25476,,,\n", + "207,0.8420,0.5497,0.4281,0.8047,0.6673,0.4956,0.0001000,25598,,,\n", + "208,0.8396,0.4748,0.4135,0.8059,0.6559,0.4961,0.0001000,25715,,,\n", + 
"209,0.8297,0.5481,0.4546,0.8074,0.6593,0.5046,0.0001000,25832,,,\n", + "210,0.8272,0.5788,0.4630,0.8051,0.6725,0.5136,0.0001000,25952,0.8051,0.6725,0.5136\n", + "211,0.8355,0.5548,0.4536,0.8015,0.6770,0.5134,0.0001000,26068,,,\n", + "212,0.8337,0.5962,0.4599,0.8063,0.6646,0.5211,0.0001000,26185,,,\n", + "213,0.8337,0.4865,0.4379,0.8084,0.6610,0.5130,0.0001000,26304,,,\n", + "214,0.8269,0.5016,0.4448,0.8068,0.6541,0.5140,0.0001000,26420,,,\n", + "215,0.8346,0.5462,0.4786,0.8003,0.6646,0.5181,0.0001000,26540,,,\n", + "216,0.8306,0.5921,0.4713,0.8063,0.6576,0.5136,0.0001000,26656,,,\n", + "217,0.8379,0.4336,0.3843,0.8088,0.6578,0.4919,0.0001000,26774,,,\n", + "218,0.8355,0.5525,0.4282,0.8038,0.6620,0.5060,0.0001000,26890,,,\n", + "219,0.8335,0.5708,0.4525,0.8088,0.6513,0.5045,0.0001000,27005,,,\n", + "220,0.8475,0.5008,0.4229,0.8080,0.6588,0.4979,0.0001000,27122,0.8080,0.6588,0.4979\n", + "221,0.8431,0.6584,0.4724,0.8049,0.6608,0.4949,0.0001000,27242,,,\n", + "222,0.8309,0.4475,0.4221,0.8051,0.6585,0.5049,0.0001000,27358,,,\n", + "223,0.8312,0.6285,0.5237,0.8099,0.6503,0.4982,0.0001000,27482,,,\n", + "224,0.8339,0.5562,0.5015,0.8063,0.6455,0.5045,0.0001000,27606,,,\n", + "225,0.8348,0.5046,0.4077,0.8013,0.6525,0.4977,0.0001000,27731,,,\n", + "226,0.8460,0.4947,0.4337,0.8053,0.6556,0.4938,0.0001000,27857,,,\n", + "227,0.8388,0.5965,0.4897,0.8070,0.6482,0.4958,0.0001000,27978,,,\n", + "228,0.8332,0.6027,0.4821,0.8097,0.6387,0.5085,0.0001000,28097,,,\n", + "229,0.8389,0.5221,0.4699,0.8070,0.6488,0.4980,0.0001000,28222,,,\n", + "230,0.8395,0.3864,0.3714,0.8061,0.6509,0.4941,0.0001000,28350,0.8061,0.6509,0.4941\n", + "231,0.8354,0.5741,0.4649,0.8057,0.6442,0.5144,0.0001000,28475,,,\n", + "232,0.8454,0.4754,0.4407,0.8084,0.6406,0.4871,0.0001000,28598,,,\n", + "233,0.8334,0.5446,0.4487,0.8095,0.6440,0.4950,0.0001000,28718,,,\n", + "234,0.8475,0.5849,0.4615,0.8043,0.6538,0.4866,0.0001000,28842,,,\n", + "235,0.8322,0.6948,0.5291,0.8045,0.6441,0.5032,0.0001000,28961,,,\n", + 
"236,0.8332,0.5247,0.4570,0.8093,0.6433,0.4987,0.0001000,29080,,,\n", + "237,0.8320,0.5450,0.4669,0.8128,0.6374,0.5004,0.0001000,29202,,,\n", + "238,0.8440,0.4666,0.3922,0.8088,0.6414,0.4979,0.0001000,29323,,,\n", + "239,0.8327,0.5488,0.4405,0.8074,0.6477,0.5064,0.0001000,29448,,,\n", + "240,0.8314,0.6070,0.4707,0.8068,0.6444,0.5036,0.0001000,29567,0.8068,0.6444,0.5036\n", + "241,0.8347,0.4323,0.4300,0.8063,0.6493,0.5067,0.0001000,29682,,,\n", + "242,0.8323,0.5215,0.4582,0.8082,0.6358,0.4985,0.0001000,29795,,,\n", + "243,0.8405,0.4015,0.3709,0.8130,0.6421,0.4868,0.0001000,29912,,,\n", + "244,0.8353,0.5290,0.4350,0.8116,0.6434,0.4921,0.0001000,30029,,,\n", + "245,0.8467,0.3699,0.3779,0.8095,0.6508,0.4901,0.0001000,30144,,,\n", + "246,0.8436,0.5215,0.4294,0.8080,0.6385,0.4921,0.0001000,30259,,,\n", + "247,0.8309,0.5858,0.4545,0.8120,0.6370,0.4986,0.0001000,30376,,,\n", + "248,0.8487,0.4616,0.4118,0.8091,0.6389,0.4869,0.0001000,30490,,,\n", + "249,0.8391,0.4873,0.4129,0.8095,0.6311,0.4959,0.0001000,30602,,,\n", + "250,0.8472,0.4277,0.3893,0.8122,0.6456,0.4839,0.0001000,30720,0.8122,0.6456,0.4839\n", + "251,0.8467,0.5004,0.3884,0.8076,0.6386,0.4940,0.0001000,30835,,,\n", + "252,0.8323,0.5448,0.4438,0.8074,0.6403,0.4999,0.0001000,30951,,,\n", + "253,0.8406,0.4536,0.4025,0.8063,0.6396,0.4953,0.0001000,31069,,,\n", + "254,0.8339,0.3931,0.3703,0.8141,0.6212,0.4914,0.0001000,31188,,,\n", + "255,0.8425,0.3777,0.3449,0.8091,0.6422,0.4791,0.0001000,31307,,,\n", + "256,0.8369,0.5319,0.4436,0.8124,0.6240,0.4958,0.0001000,31425,,,\n", + "257,0.8430,0.5262,0.4611,0.8143,0.6306,0.4920,0.0001000,31545,,,\n", + "258,0.8335,0.6729,0.4956,0.8084,0.6345,0.4941,0.0001000,31659,,,\n", + "259,0.8361,0.5056,0.4026,0.8145,0.6402,0.4891,0.0001000,31773,,,\n", + "260,0.8320,0.5585,0.4824,0.8093,0.6156,0.4886,0.0001000,31885,0.8093,0.6156,0.4886\n", + "261,0.8362,0.5381,0.4480,0.8109,0.6282,0.4984,0.0001000,31995,,,\n", + "262,0.8426,0.6025,0.4902,0.8105,0.6361,0.4874,0.0001000,32105,,,\n", + 
"263,0.8324,0.5826,0.4961,0.8132,0.6198,0.4862,0.0001000,32222,,,\n", + "264,0.8387,0.6445,0.5167,0.8074,0.6311,0.5059,0.0001000,32333,,,\n", + "265,0.8343,0.5738,0.4614,0.8101,0.6327,0.4928,0.0001000,32444,,,\n", + "266,0.8353,0.6465,0.4903,0.8143,0.6393,0.4852,0.0001000,32554,,,\n", + "267,0.8350,0.6780,0.5080,0.8122,0.6329,0.4922,0.0001000,32665,,,\n", + "268,0.8370,0.5225,0.4424,0.8105,0.6271,0.4926,0.0001000,32776,,,\n", + "269,0.8361,0.5011,0.4317,0.8159,0.6225,0.4897,0.0001000,32889,,,\n", + "270,0.8366,0.5111,0.4312,0.8168,0.6189,0.4960,0.0001000,33005,0.8168,0.6189,0.4960\n", + "271,0.8454,0.5163,0.4469,0.8130,0.6308,0.4790,0.0001000,33116,,,\n", + "272,0.8349,0.7337,0.5515,0.8153,0.6187,0.4833,0.0001000,33227,,,\n", + "273,0.8364,0.5419,0.4158,0.8097,0.6337,0.4898,0.0001000,33339,,,\n", + "274,0.8382,0.4944,0.4117,0.8103,0.6169,0.4884,0.0001000,33450,,,\n", + "275,0.8452,0.4003,0.3682,0.8074,0.6306,0.4880,0.0001000,33559,,,\n", + "276,0.8473,0.4469,0.4039,0.8174,0.6199,0.4683,0.0001000,33672,,,\n", + "277,0.8428,0.4360,0.3435,0.8078,0.6255,0.4878,0.0001000,33781,,,\n", + "278,0.8380,0.4272,0.3656,0.8128,0.6334,0.4873,0.0001000,33890,,,\n", + "279,0.8406,0.6571,0.4914,0.8159,0.6283,0.4863,0.0001000,34000,,,\n", + "280,0.8438,0.4342,0.4112,0.8155,0.6285,0.4641,0.0001000,34112,0.8155,0.6285,0.4641\n", + "281,0.8383,0.5665,0.5015,0.8109,0.6229,0.4879,0.0001000,34224,,,\n", + "282,0.8533,0.4489,0.3885,0.8193,0.6322,0.4754,0.0001000,34338,,,\n", + "283,0.8368,0.4898,0.4544,0.8130,0.6188,0.4950,0.0001000,34450,,,\n", + "284,0.8477,0.5733,0.4162,0.8120,0.6302,0.4843,0.0001000,34560,,,\n", + "285,0.8393,0.4163,0.3607,0.8157,0.6091,0.4761,0.0001000,34670,,,\n", + "286,0.8466,0.5268,0.4107,0.8105,0.6268,0.4875,0.0001000,34780,,,\n", + "287,0.8365,0.4204,0.3726,0.8159,0.6268,0.4776,0.0001000,34893,,,\n", + "288,0.8385,0.4381,0.3883,0.8111,0.6219,0.4877,0.0001000,35002,,,\n", + "289,0.8406,0.5725,0.4116,0.8162,0.6143,0.4885,0.0001000,35113,,,\n", + 
"290,0.8489,0.5589,0.4380,0.8153,0.6257,0.4786,0.0001000,35225,0.8153,0.6257,0.4786\n", + "291,0.8380,0.5050,0.4239,0.8149,0.6152,0.4906,0.0001000,35334,,,\n", + "292,0.8481,0.4847,0.4284,0.8159,0.6165,0.4782,0.0001000,35446,,,\n", + "293,0.8543,0.5539,0.4552,0.8109,0.6311,0.4710,0.0001000,35554,,,\n", + "294,0.8422,0.5566,0.4434,0.8149,0.6185,0.4822,0.0001000,35664,,,\n", + "295,0.8402,0.4851,0.4017,0.8147,0.6225,0.4832,0.0001000,35772,,,\n", + "296,0.8403,0.7517,0.5071,0.8162,0.6270,0.4851,0.0001000,35880,,,\n", + "297,0.8475,0.5178,0.4517,0.8139,0.6257,0.4787,0.0001000,35990,,,\n", + "298,0.8476,0.4834,0.4273,0.8126,0.6249,0.4815,0.0001000,36100,,,\n", + "299,0.8403,0.4237,0.3832,0.8141,0.6234,0.4821,0.0001000,36208,,,\n", + "300,0.8391,0.4816,0.4175,0.8162,0.6200,0.4843,0.0001000,36319,0.8162,0.6200,0.4843\n", + "301,0.8395,0.5736,0.4831,0.8124,0.6195,0.4817,0.0001000,36428,,,\n", + "302,0.8396,0.4742,0.4227,0.8128,0.6148,0.4774,0.0001000,36536,,,\n", + "303,0.8395,0.5292,0.4540,0.8151,0.6207,0.4833,0.0001000,36651,,,\n", + "304,0.8508,0.4826,0.4140,0.8151,0.6240,0.4643,0.0001000,36767,,,\n", + "305,0.8438,0.5594,0.4303,0.8141,0.6222,0.4866,0.0001000,36880,,,\n", + "306,0.8562,0.4581,0.4013,0.8107,0.6220,0.4797,0.0001000,36993,,,\n", + "307,0.8410,0.5766,0.4537,0.8122,0.6156,0.4789,0.0001000,37106,,,\n", + "308,0.8589,0.4304,0.3926,0.8166,0.6149,0.4652,0.0001000,37216,,,\n", + "309,0.8526,0.5134,0.4365,0.8139,0.6192,0.4765,0.0001000,37327,,,\n", + "310,0.8506,0.5492,0.4229,0.8149,0.6156,0.4730,0.0001000,37439,0.8149,0.6156,0.4730\n", + "311,0.8550,0.5379,0.4811,0.8143,0.6102,0.4740,0.0001000,37548,,,\n", + "312,0.8403,0.5111,0.4143,0.8128,0.6220,0.4831,0.0001000,37657,,,\n", + "313,0.8516,0.4165,0.3550,0.8128,0.6183,0.4551,0.0001000,37766,,,\n", + "314,0.8427,0.4845,0.4204,0.8180,0.6092,0.4752,0.0001000,37873,,,\n", + "315,0.8480,0.5038,0.4138,0.8187,0.6102,0.4619,0.0001000,37981,,,\n", + "316,0.8435,0.5842,0.4545,0.8193,0.6112,0.4715,0.0001000,38091,,,\n", + 
"317,0.8416,0.4900,0.4117,0.8210,0.6025,0.4800,0.0001000,38203,,,\n", + "318,0.8400,0.5337,0.4574,0.8195,0.6017,0.4774,0.0001000,38312,,,\n", + "319,0.8428,0.4993,0.4414,0.8205,0.6054,0.4765,0.0001000,38424,,,\n", + "320,0.8537,0.3848,0.3327,0.8187,0.6141,0.4520,0.0001000,38536,0.8187,0.6141,0.4520\n", + "321,0.8653,0.5875,0.4280,0.8147,0.6173,0.4595,0.0001000,38646,,,\n", + "322,0.8591,0.4434,0.3836,0.8166,0.6049,0.4652,0.0001000,38757,,,\n", + "323,0.8504,0.4869,0.4052,0.8172,0.6076,0.4658,0.0001000,38866,,,\n", + "324,0.8591,0.4338,0.3905,0.8174,0.6072,0.4641,0.0001000,38977,,,\n", + "325,0.8423,0.5138,0.4031,0.8128,0.6160,0.4782,0.0001000,39087,,,\n", + "326,0.8394,0.4462,0.3523,0.8191,0.6005,0.4692,0.0001000,39196,,,\n", + "327,0.8531,0.4444,0.3917,0.8170,0.6112,0.4678,0.0001000,39305,,,\n", + "328,0.8424,0.5155,0.4248,0.8216,0.6133,0.4786,0.0001000,39417,,,\n", + "329,0.8399,0.5879,0.4308,0.8136,0.6100,0.4782,0.0001000,39529,,,\n", + "330,0.8471,0.5874,0.4417,0.8195,0.5959,0.4689,0.0001000,39641,0.8195,0.5959,0.4689\n", + "331,0.8435,0.5248,0.4128,0.8207,0.6099,0.4670,0.0001000,39750,,,\n", + "332,0.8649,0.2951,0.3041,0.8134,0.6152,0.4576,0.0001000,39857,,,\n", + "333,0.8438,0.5974,0.4786,0.8170,0.6118,0.4859,0.0001000,39966,,,\n", + "334,0.8412,0.5696,0.4831,0.8168,0.5999,0.4754,0.0001000,40073,,,\n", + "335,0.8598,0.4746,0.4192,0.8199,0.6060,0.4625,0.0001000,40183,,,\n", + "336,0.8674,0.4496,0.3679,0.8197,0.6174,0.4508,0.0001000,40295,,,\n", + "337,0.8448,0.5243,0.4365,0.8168,0.6056,0.4757,0.0001000,40405,,,\n", + "338,0.8535,0.3596,0.3291,0.8193,0.5935,0.4617,0.0001000,40512,,,\n", + "339,0.8511,0.3069,0.3081,0.8174,0.6080,0.4540,0.0001000,40620,,,\n", + "340,0.8647,0.3888,0.2967,0.8124,0.6101,0.4708,0.0001000,40732,0.8124,0.6101,0.4708\n", + "341,0.8452,0.4629,0.4096,0.8199,0.5958,0.4620,0.0001000,40837,,,\n", + "342,0.8486,0.4921,0.4153,0.8172,0.6066,0.4702,0.0001000,40945,,,\n", + "343,0.8430,0.4415,0.4087,0.8222,0.6032,0.4684,0.0001000,41053,,,\n", + 
"344,0.8453,0.4918,0.4027,0.8222,0.6052,0.4732,0.0001000,41161,,,\n", + "345,0.8636,0.5553,0.4430,0.8205,0.6054,0.4613,0.0001000,41270,,,\n", + "346,0.8503,0.5436,0.4557,0.8214,0.6039,0.4724,0.0001000,41377,,,\n", + "347,0.8423,0.5628,0.4942,0.8145,0.6062,0.4848,0.0001000,41485,,,\n", + "348,0.8605,0.4906,0.4097,0.8199,0.6061,0.4500,0.0001000,41593,,,\n", + "349,0.8470,0.4108,0.3734,0.8216,0.5875,0.4555,0.0001000,41701,,,\n", + "350,0.8455,0.4917,0.4074,0.8203,0.6058,0.4629,0.0001000,41811,0.8203,0.6058,0.4629\n", + "351,0.8514,0.5687,0.4349,0.8207,0.6049,0.4649,0.0001000,41918,,,\n", + "352,0.8465,0.5434,0.4479,0.8157,0.5983,0.4686,0.0001000,42026,,,\n", + "353,0.8440,0.5002,0.4044,0.8193,0.5953,0.4694,0.0001000,42136,,,\n", + "354,0.8585,0.3209,0.3174,0.8228,0.6069,0.4543,0.0001000,42247,,,\n", + "355,0.8464,0.4718,0.3865,0.8193,0.5960,0.4600,0.0001000,42354,,,\n", + "356,0.8413,0.5391,0.4585,0.8207,0.5881,0.4681,0.0001000,42463,,,\n", + "357,0.8464,0.4508,0.4035,0.8205,0.5936,0.4718,0.0001000,42570,,,\n", + "358,0.8475,0.5025,0.4231,0.8189,0.6014,0.4642,0.0001000,42678,,,\n", + "359,0.8537,0.2384,0.2709,0.8203,0.6031,0.4626,0.0001000,42784,,,\n", + "360,0.8513,0.5288,0.4397,0.8187,0.6005,0.4713,0.0001000,42892,0.8187,0.6005,0.4713\n", + "361,0.8595,0.5338,0.4289,0.8237,0.5966,0.4661,0.0001000,43001,,,\n", + "362,0.8499,0.5750,0.4432,0.8266,0.6008,0.4621,0.0001000,43110,,,\n", + "363,0.8578,0.4338,0.3899,0.8193,0.5935,0.4582,0.0001000,43217,,,\n", + "364,0.8649,0.4754,0.3858,0.8195,0.6046,0.4552,0.0001000,43326,,,\n", + "365,0.8767,0.4665,0.3884,0.8193,0.6089,0.4463,0.0001000,43436,,,\n", + "366,0.8549,0.4861,0.4202,0.8170,0.6031,0.4655,0.0001000,43545,,,\n", + "367,0.8473,0.4603,0.4098,0.8201,0.6014,0.4643,0.0001000,43652,,,\n", + "368,0.8543,0.3746,0.3757,0.8157,0.6098,0.4638,0.0001000,43760,,,\n", + "369,0.8508,0.4417,0.4150,0.8220,0.5997,0.4669,0.0001000,43869,,,\n", + "370,0.8682,0.4287,0.3961,0.8235,0.6045,0.4498,0.0001000,43979,0.8235,0.6045,0.4498\n", + 
"371,0.8456,0.4930,0.4108,0.8207,0.5956,0.4618,0.0001000,44086,,,\n", + "372,0.8439,0.4759,0.3955,0.8207,0.5914,0.4632,0.0001000,44191,,,\n", + "373,0.8519,0.6002,0.4553,0.8203,0.5976,0.4662,0.0001000,44297,,,\n", + "374,0.8525,0.4472,0.3795,0.8199,0.6021,0.4631,0.0001000,44404,,,\n", + "375,0.8444,0.5390,0.4684,0.8141,0.6044,0.4708,0.0001000,44510,,,\n", + "376,0.8686,0.3785,0.3471,0.8197,0.6038,0.4560,0.0001000,44615,,,\n", + "377,0.8461,0.6737,0.4773,0.8193,0.5896,0.4612,0.0001000,44722,,,\n", + "378,0.8451,0.4670,0.4159,0.8253,0.5887,0.4681,0.0001000,44827,,,\n", + "379,0.8633,0.3988,0.3372,0.8191,0.6072,0.4475,0.0001000,44933,,,\n", + "380,0.8479,0.5354,0.4509,0.8187,0.6021,0.4678,0.0001000,45041,0.8187,0.6021,0.4678\n", + "381,0.8469,0.4293,0.3688,0.8184,0.5907,0.4666,0.0001000,45147,,,\n", + "382,0.8662,0.5016,0.3909,0.8195,0.6022,0.4463,0.0001000,45253,,,\n", + "383,0.8722,0.4849,0.3958,0.8230,0.6031,0.4501,0.0001000,45360,,,\n", + "384,0.8473,0.4905,0.3829,0.8141,0.5886,0.4691,0.0001000,45467,,,\n", + "385,0.8585,0.5282,0.3877,0.8268,0.5923,0.4550,0.0001000,45576,,,\n", + "386,0.8611,0.4760,0.4149,0.8210,0.5906,0.4636,0.0001000,45686,,,\n", + "387,0.8548,0.4999,0.4187,0.8214,0.5855,0.4628,0.0001000,45793,,,\n", + "388,0.8449,0.4127,0.3707,0.8235,0.5854,0.4677,0.0001000,45900,,,\n", + "389,0.8742,0.3595,0.3659,0.8201,0.6067,0.4384,0.0001000,46008,,,\n", + "390,0.8501,0.4621,0.4037,0.8201,0.5946,0.4681,0.0001000,46118,0.8201,0.5946,0.4681\n", + "391,0.8455,0.4386,0.3998,0.8218,0.5843,0.4625,0.0001000,46226,,,\n", + "392,0.8444,0.5802,0.4465,0.8207,0.5877,0.4576,0.0001000,46332,,,\n", + "393,0.8423,0.5327,0.4121,0.8239,0.5837,0.4695,0.0001000,46436,,,\n", + "394,0.8692,0.4741,0.3980,0.8218,0.5994,0.4448,0.0001000,46542,,,\n", + "395,0.8540,0.3912,0.3705,0.8201,0.5882,0.4635,0.0001000,46648,,,\n", + "396,0.8636,0.4181,0.3627,0.8237,0.5847,0.4700,0.0001000,46754,,,\n", + "397,0.8492,0.5244,0.4349,0.8168,0.6045,0.4698,0.0001000,46860,,,\n", + 
"398,0.8454,0.4771,0.3867,0.8260,0.5818,0.4555,0.0001000,46966,,,\n", + "399,0.8437,0.4256,0.3789,0.8251,0.5857,0.4743,0.0001000,47073,,,\n", + "400,0.8444,0.5318,0.4777,0.8268,0.5787,0.4588,0.0001000,47179,0.8268,0.5787,0.4588\n", + "401,0.8527,0.5132,0.4390,0.8218,0.5802,0.4684,0.0001000,47284,,,\n", + "402,0.8667,0.4799,0.3567,0.8253,0.5881,0.4601,0.0001000,47392,,,\n", + "403,0.8504,0.4325,0.3807,0.8226,0.5891,0.4613,0.0001000,47499,,,\n", + "404,0.8448,0.5650,0.4856,0.8260,0.5803,0.4599,0.0001000,47605,,,\n", + "405,0.8687,0.3857,0.3616,0.8258,0.5845,0.4464,0.0001000,47712,,,\n", + "406,0.8493,0.5225,0.4419,0.8205,0.5860,0.4634,0.0001000,47817,,,\n", + "407,0.8481,0.4358,0.3605,0.8274,0.5754,0.4573,0.0001000,47925,,,\n", + "408,0.8512,0.4472,0.3731,0.8237,0.5866,0.4598,0.0001000,48031,,,\n", + "409,0.8617,0.4750,0.4109,0.8199,0.5913,0.4651,0.0001000,48138,,,\n", + "410,0.8517,0.4053,0.3415,0.8243,0.5750,0.4570,0.0001000,48247,0.8243,0.5750,0.4570\n", + "411,0.8684,0.3384,0.3224,0.8258,0.5781,0.4453,0.0001000,48355,,,\n", + "412,0.8803,0.4318,0.3580,0.8191,0.5842,0.4554,0.0001000,48463,,,\n", + "413,0.8506,0.4548,0.4168,0.8255,0.5744,0.4619,0.0001000,48569,,,\n", + "414,0.8497,0.3886,0.3433,0.8268,0.5715,0.4600,0.0001000,48675,,,\n", + "415,0.8617,0.5295,0.4092,0.8247,0.5731,0.4536,0.0001000,48781,,,\n", + "416,0.8535,0.5655,0.4533,0.8220,0.5692,0.4609,0.0001000,48886,,,\n", + "417,0.8671,0.4665,0.4157,0.8212,0.5714,0.4502,0.0001000,48993,,,\n", + "418,0.8883,0.3943,0.3477,0.8187,0.6045,0.4340,0.0001000,49101,,,\n", + "419,0.8590,0.4782,0.3906,0.8216,0.5748,0.4555,0.0001000,49211,,,\n", + "420,0.8775,0.2739,0.2572,0.8255,0.5941,0.4256,0.0001000,49321,0.8255,0.5941,0.4256\n", + "421,0.8794,0.3643,0.3280,0.8235,0.5872,0.4425,0.0001000,49429,,,\n", + "422,0.8559,0.3929,0.3475,0.8245,0.5859,0.4594,0.0001000,49535,,,\n", + "423,0.8514,0.4401,0.3704,0.8251,0.5848,0.4478,0.0001000,49640,,,\n", + "424,0.8698,0.4712,0.4124,0.8230,0.5811,0.4548,0.0001000,49745,,,\n", + 
"425,0.8578,0.5701,0.4529,0.8249,0.5770,0.4593,0.0001000,49850,,,\n", + "426,0.8467,0.5252,0.4163,0.8266,0.5725,0.4566,0.0001000,49954,,,\n", + "427,0.8449,0.5043,0.4232,0.8239,0.5816,0.4517,0.0001000,50059,,,\n", + "428,0.8798,0.3462,0.3252,0.8268,0.5949,0.4409,0.0001000,50165,,,\n", + "429,0.8572,0.4844,0.3740,0.8253,0.5745,0.4537,0.0001000,50270,,,\n", + "430,0.8482,0.5048,0.4125,0.8268,0.5761,0.4618,0.0001000,50377,0.8268,0.5761,0.4618\n", + "431,0.8473,0.5265,0.4052,0.8278,0.5766,0.4523,0.0001000,50483,,,\n", + "432,0.8655,0.4760,0.3692,0.8245,0.5965,0.4515,0.0001000,50589,,,\n", + "433,0.8486,0.3953,0.3577,0.8253,0.5823,0.4606,0.0001000,50695,,,\n", + "434,0.8578,0.3709,0.3448,0.8255,0.5900,0.4450,0.0001000,50801,,,\n", + "435,0.8528,0.4494,0.3878,0.8226,0.5754,0.4568,0.0001000,50909,,,\n", + "436,0.8480,0.4594,0.3902,0.8232,0.5898,0.4601,0.0001000,51018,,,\n", + "437,0.8495,0.4737,0.4130,0.8253,0.5806,0.4584,0.0001000,51121,,,\n", + "438,0.8464,0.5284,0.4245,0.8260,0.5761,0.4587,0.0001000,51223,,,\n", + "439,0.8531,0.3779,0.3465,0.8270,0.5710,0.4547,0.0001000,51328,,,\n", + "440,0.8481,0.5490,0.4630,0.8262,0.5737,0.4580,0.0001000,51434,0.8262,0.5737,0.4580\n", + "441,0.8697,0.5039,0.4301,0.8276,0.5804,0.4428,0.0001000,51537,,,\n", + "442,0.8676,0.4032,0.3744,0.8253,0.5727,0.4437,0.0001000,51643,,,\n", + "443,0.8631,0.5097,0.4059,0.8239,0.5792,0.4563,0.0001000,51748,,,\n", + "444,0.8698,0.2813,0.2697,0.8251,0.6028,0.4387,0.0001000,51851,,,\n", + "445,0.8832,0.2590,0.2701,0.8247,0.6014,0.4398,0.0001000,51951,,,\n", + "446,0.8721,0.2815,0.2892,0.8241,0.5869,0.4404,0.0001000,52048,,,\n", + "447,0.8698,0.3522,0.3130,0.8216,0.5923,0.4485,0.0001000,52155,,,\n", + "448,0.8822,0.5504,0.3981,0.8182,0.6116,0.4331,0.0001000,52262,,,\n", + "449,0.8512,0.4343,0.3344,0.8226,0.5838,0.4448,0.0001000,52365,,,\n", + "450,0.8814,0.4329,0.3688,0.8199,0.5945,0.4438,0.0001000,52473,0.8199,0.5945,0.4438\n", + "451,0.8948,0.3125,0.2959,0.8228,0.6180,0.4330,0.0001000,52583,,,\n", + 
"452,0.8485,0.4883,0.4380,0.8195,0.5857,0.4622,0.0001000,52689,,,\n", + "453,0.8969,0.3810,0.3406,0.8172,0.6210,0.4332,0.0001000,52795,,,\n", + "454,0.8667,0.4746,0.3934,0.8212,0.5839,0.4578,0.0001000,52903,,,\n", + "455,0.8494,0.5162,0.3620,0.8218,0.5801,0.4555,0.0001000,53005,,,\n", + "456,0.8483,0.5452,0.4447,0.8230,0.5856,0.4576,0.0001000,53108,,,\n", + "457,0.8692,0.4101,0.3650,0.8235,0.5852,0.4480,0.0001000,53213,,,\n", + "458,0.8572,0.3934,0.3490,0.8260,0.5875,0.4343,0.0001000,53315,,,\n", + "459,0.8559,0.2868,0.2876,0.8228,0.5787,0.4410,0.0001000,53418,,,\n", + "460,0.8733,0.4530,0.3679,0.8245,0.5747,0.4424,0.0001000,53525,0.8245,0.5747,0.4424\n", + "461,0.8776,0.4023,0.3721,0.8241,0.5805,0.4325,0.0001000,53632,,,\n", + "462,0.8509,0.4839,0.4215,0.8258,0.5797,0.4515,0.0001000,53737,,,\n", + "463,0.8457,0.3734,0.3658,0.8210,0.5684,0.4600,0.0001000,53842,,,\n", + "464,0.8506,0.4154,0.3580,0.8251,0.5730,0.4495,0.0001000,53946,,,\n", + "465,0.8608,0.4702,0.3885,0.8241,0.5739,0.4522,0.0001000,54051,,,\n", + "466,0.8490,0.5019,0.4156,0.8270,0.5621,0.4530,0.0001000,54154,,,\n", + "467,0.8473,0.6810,0.4770,0.8280,0.5717,0.4625,0.0001000,54259,,,\n", + "468,0.8479,0.4491,0.3894,0.8243,0.5745,0.4497,0.0001000,54363,,,\n", + "469,0.8627,0.4623,0.3514,0.8266,0.5820,0.4536,0.0001000,54469,,,\n", + "470,0.8519,0.4979,0.3938,0.8270,0.5703,0.4518,0.0001000,54575,0.8270,0.5703,0.4518\n", + "471,0.8722,0.4964,0.4141,0.8243,0.5840,0.4397,0.0001000,54680,,,\n", + "472,0.8526,0.4803,0.3883,0.8243,0.5745,0.4455,0.0001000,54784,,,\n", + "473,0.8532,0.4760,0.3755,0.8260,0.5736,0.4389,0.0001000,54887,,,\n", + "474,0.8494,0.5371,0.4355,0.8241,0.5802,0.4552,0.0001000,54988,,,\n", + "475,0.8537,0.5074,0.4072,0.8249,0.5879,0.4605,0.0001000,55090,,,\n", + "476,0.8479,0.5766,0.4577,0.8258,0.5647,0.4543,0.0001000,55193,,,\n", + "477,0.8598,0.3962,0.3410,0.8235,0.5723,0.4474,0.0001000,55297,,,\n", + "478,0.8750,0.5077,0.4011,0.8228,0.5855,0.4376,0.0001000,55400,,,\n", + 
"479,0.8521,0.4600,0.4022,0.8239,0.5705,0.4493,0.0001000,55504,,,\n", + "480,0.8524,0.5375,0.4295,0.8278,0.5656,0.4468,0.0001000,55609,0.8278,0.5656,0.4468\n", + "481,0.8514,0.3852,0.3157,0.8274,0.5662,0.4515,0.0001000,55711,,,\n", + "482,0.8484,0.4176,0.3859,0.8280,0.5626,0.4452,0.0001000,55814,,,\n", + "483,0.8535,0.3734,0.3449,0.8276,0.5673,0.4503,0.0001000,55918,,,\n", + "484,0.8513,0.6021,0.4640,0.8247,0.5804,0.4534,0.0001000,56021,,,\n", + "485,0.8526,0.4174,0.3500,0.8280,0.5672,0.4493,0.0001000,56123,,,\n", + "486,0.8475,0.5454,0.4098,0.8253,0.5700,0.4520,0.0001000,56226,,,\n", + "487,0.8518,0.4307,0.3710,0.8230,0.5741,0.4515,0.0001000,56328,,,\n", + "488,0.8653,0.4962,0.4005,0.8237,0.5699,0.4503,0.0001000,56435,,,\n", + "489,0.8805,0.4241,0.3485,0.8201,0.6007,0.4420,0.0001000,56542,,,\n", + "490,0.8577,0.5595,0.4285,0.8285,0.5652,0.4476,0.0001000,56650,0.8285,0.5652,0.4476\n", + "491,0.8801,0.3195,0.3213,0.8266,0.5770,0.4370,0.0001000,56755,,,\n", + "492,0.8680,0.2876,0.3086,0.8253,0.5753,0.4299,0.0001000,56858,,,\n", + "493,0.8530,0.4390,0.3866,0.8258,0.5802,0.4492,0.0001000,56962,,,\n", + "494,0.8593,0.4813,0.4084,0.8235,0.5730,0.4482,0.0001000,57067,,,\n", + "495,0.8631,0.2976,0.3094,0.8228,0.5732,0.4339,0.0001000,57170,,,\n", + "496,0.8517,0.4239,0.3813,0.8226,0.5687,0.4526,0.0001000,57273,,,\n", + "497,0.8643,0.4731,0.4077,0.8237,0.5680,0.4519,0.0001000,57377,,,\n", + "498,0.8502,0.5058,0.4193,0.8278,0.5637,0.4492,0.0001000,57479,,,\n", + "499,0.8659,0.5238,0.4276,0.8283,0.5662,0.4524,0.0001000,57577,,,\n", + "500,0.8555,0.5033,0.4300,0.8266,0.5683,0.4480,0.0001000,57670,0.8266,0.5683,0.4480\n", + "FINAL TEST,,,,,,,,57673,0.8285,0.5652,0.4476\n" + ] + } + ], + "source": [ + "!cat autosave.csv" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "_HCwWUq3JV55", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "b87d815f-6a5b-4884-abf0-6c3e0ce8d848" + }, + "outputs": [ + { + "output_type": 
"stream", + "name": "stdout", + "text": [ + " adding: autosave.nn (deflated 59%)\n" + ] + } + ], + "source": [ + "!zip JP45F01 autosave.nn" + ] + } + ], + "metadata": { + "colab": { + "machine_shape": "hm", + "provenance": [] + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": "python" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/examples/SimpleNLP/README.md b/examples/SimpleNLP/README.md new file mode 100644 index 00000000..f152bd52 --- /dev/null +++ b/examples/SimpleNLP/README.md @@ -0,0 +1,146 @@ +# Training a Simple Neural Network Model for Text Generation +This source code example shows a (hello world) small neural network trained on the [Tiny Stories dataset](https://huggingface.co/datasets/roneneldan/TinyStories). This code + +``` + WriteLn(GenerateStringFromChars(NFit.NN, 'once', FSampler),'.'); + WriteLn(GenerateStringFromChars(NFit.NN, 'one ', FSampler),'.'); +``` + +produces this output: +``` +once upon a time, there was a little girl named lily. she loved to play outside i. +one day, a little girl named lily was playing in her garden. she saw a big car wi. +``` + +You can find the raw training file and run by yourself at: +https://colab.research.google.com/github/joaopauloschuler/neural-api/blob/master/examples/SimpleNLP/NLP_CAI_TinyStories_Simple_Example.ipynb + +## Details +This source code above uses a neural network to guesses the next character in a string. +It downloads the [Tiny Stories dataset](https://huggingface.co/datasets/roneneldan/TinyStories) and trains a small Pascal written neural network model. The neural network model is built with: + +``` +const + csContextLen = 81; + csTrainingFileName = 'tinystories.txt'; + csVocabSize = 128; // Character based vocabulary/dictionary. + csMinSampleSize = 3; // Minimum of 3 characters. +... 
+ FNN.AddLayer([ + TNNetInput.Create(csContextLen, 1, csVocabSize), + TNNetPointwiseConv.Create(32,1), + TNNetPadXY.Create(1,0), + TNNetConvolutionReLU.Create(64,3,0,1,1), + TNNetMaxPool.Create(3), + TNNetPadXY.Create(1,0), + TNNetConvolutionReLU.Create(128*3,3,0,1,1), + TNNetPointwiseConvReLU.Create(1024,0), + TNNetMaxPoolWithPosition.Create(27,27,0,1,0), + TNNetPointwiseConvReLU.Create(1024), + TNNetPointwiseConvReLU.Create(128), + TNNetFullConnectLinear.Create(csVocabSize), + TNNetSoftMax.Create() + ]); +``` + +This neural network has some characteristics: +* It’s character based. Therefore, there is no dictionary. The convolutional layers are responsible for learning the words. In the first epochs of the training, we can see that the neural network is learning the words. This architecture benefits from the small vocabulary found in the “Tiny Stories” dataset. +* It predicts the next character in an input sequence (or context). In this example, the context is 81 characters. +* There is no recursive computation. It’s a convolutional model. Therefore, it’s memory efficient and can be computed in a highly parallel environment. +* One of the max pooling layers inserts the positional information of the max values. +* In this particular example, it learns very well the [Tiny Stories dataset](https://huggingface.co/datasets/roneneldan/TinyStories). The very same model was used to train with wikipedia but wikipedia vocabulary and sentence structures are too complex for this small 2.8 million parameters model. You can just replace tinystories.txt and train it on your own text file (dataset). This source code is the “hello world” of the NLP. Don’t expect too much from it. 
+ +In the case that you are curious, there are plenty of scientific studies supporting NLP with CNNs: +* https://aclanthology.org/W18-6127/ - Convolutions Are All You Need (For Classifying Character Sequences) +* https://arxiv.org/abs/1712.09662 - CNN Is All You Need +* https://arxiv.org/abs/1804.09541 - QANet: Combining Local Convolution with Global Self-Attention for Reading Comprehension +* https://aclanthology.org/N19-1407.pdf - Convolutional Self-Attention Networks +* https://arxiv.org/pdf/1805.08318.pdf - Self-Attention Generative Adversarial Networks + +## A Bit of the API Behind the Scenes +Samplers are used to probabilistically select the next token (character) from the probabilities guessed by the neural network. The Greedy, Top-K, and Top-P samplers provide different ways to predict the next character in a sequence. + +Greedy Sampling: +* Always selects the token with the highest probability at each step. +* Tends to produce repetitive and deterministic output. + +Top-K Sampling: +* Samples from the K most likely next tokens at each step. +* K is a parameter that controls diversity - a bigger K leads to more diverse results. + +Top-P Sampling: +* Samples from the smallest possible set of tokens whose cumulative probability exceeds P at each step. +* P is a parameter between 0 and 1 controlling diversity - lower P produces less diversity. + +In summary: +Greedy sampling takes the most likely token, leading to less diversity. Top-K and Top-P allow controlling diversity by adjusting their parameters. 
+ +These samplers are available in plain pascal code: + +``` + { TNNetSamplerGreedy } + TNNetSamplerGreedy = class (TNNetSamplerBase) + public + function GetToken(Origin: TNNetVolume): integer; override; + end; + + { TNNetSamplerTopK } + TNNetSamplerTopK = class (TNNetSamplerBase) + protected + FTopK: integer; + public + constructor Create(TopK: integer); + function GetToken(Origin: TNNetVolume): integer; override; + end; + + { TNNetSamplerTopP } + TNNetSamplerTopP = class (TNNetSamplerBase) + protected + FTopP: TNeuralFloat; + public + constructor Create(TopP: TNeuralFloat); + function GetToken(Origin: TNNetVolume): integer; override; + end; +``` + +In this source code example, the sampler is created with `FSampler := TNNetSamplerTopP.Create(0.4);` + +Then, you can just call the following to see the magic: + +``` + WriteLn(GenerateStringFromChars(NFit.NN, 'once', FSampler),'.'); + WriteLn(GenerateStringFromChars(NFit.NN, 'one ', FSampler),'.'); +``` + +The loading and saving of neural networks (NN) can be done with: +``` + NN := TNNet.Create; + NN.LoadFromFile('MyTrainedNeuralNetwork.nn'); + NN.SaveToFile('MyTrainedNeuralNetwork.nn'); +``` + +A small chat bot can be coded with: + +``` +procedure TestFromFile; +var + S: string; + oSampler: TNNetSamplerBase; + NN: TNNet; +begin + oSampler := TNNetSamplerTopP.Create(0.6); + NN := TNNet.Create(); + WriteLn('Loading neural network.'); + NN.LoadFromFile(csAutosavedFileName); + NN.DebugStructure(); + WriteLn(); + WriteLn('Write something and I will reply.'); + repeat + Write('User: '); + ReadLn(S); + WriteLn('Neural network: ',GenerateStringFromChars(NN, LowerCase(S), oSampler),'.'); + until S = 'exit'; + NN.Free; + oSampler.Free; +end; +``` diff --git a/examples/SimpleNLP/SimpleNLP.lpi b/examples/SimpleNLP/SimpleNLP.lpi new file mode 100644 index 00000000..8bc4bd13 --- /dev/null +++ b/examples/SimpleNLP/SimpleNLP.lpi @@ -0,0 +1,170 @@ +<?xml version="1.0" encoding="UTF-8"?> +<CONFIG> + <ProjectOptions> + <Version 
Value="12"/> + <PathDelim Value="\"/> + <General> + <Flags> + <MainUnitHasCreateFormStatements Value="False"/> + <MainUnitHasTitleStatement Value="False"/> + <MainUnitHasScaledStatement Value="False"/> + <CompatibilityMode Value="True"/> + </Flags> + <SessionStorage Value="InProjectDir"/> + <Title Value="Simple NLP Example"/> + <UseAppBundle Value="False"/> + <ResourceType Value="res"/> + </General> + <i18n> + <EnableI18N LFM="False"/> + </i18n> + <BuildModes Count="3"> + <Item1 Name="Default" Default="True"/> + <Item2 Name="Debug"> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\SimpleNLP"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\neural;$(ProjOutDir)"/> + <OtherUnitFiles Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <Parsing> + <SyntaxOptions> + <IncludeAssertionCode Value="True"/> + </SyntaxOptions> + </Parsing> + <CodeGeneration> + <Checks> + <IOChecks Value="True"/> + <RangeChecks Value="True"/> + <OverflowChecks Value="True"/> + <StackChecks Value="True"/> + </Checks> + <VerifyObjMethodCallValidity Value="True"/> + </CodeGeneration> + <Linking> + <Debugging> + <DebugInfoType Value="dsDwarf2Set"/> + <UseValgrind Value="True"/> + <UseExternalDbgSyms Value="True"/> + </Debugging> + </Linking> + <Other> + <CustomOptions Value="-dDebug +-dAVX"/> + <OtherDefines Count="2"> + <Define0 Value="Debug"/> + <Define1 Value="AVX"/> + </OtherDefines> + </Other> + </CompilerOptions> + </Item2> + <Item3 Name="Release"> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\SimpleNLP"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\..\neural;$(ProjOutDir)"/> + <OtherUnitFiles 
Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <CodeGeneration> + <SmartLinkUnit Value="True"/> + <Optimizations> + <OptimizationLevel Value="3"/> + </Optimizations> + </CodeGeneration> + <Linking> + <Debugging> + <GenerateDebugInfo Value="False"/> + <DebugInfoType Value="dsDwarf2Set"/> + </Debugging> + <LinkSmart Value="True"/> + </Linking> + <Other> + <CustomOptions Value="-dRelease +-dAVX"/> + <OtherDefines Count="5"> + <Define0 Value="Release"/> + <Define1 Value="Debug"/> + <Define2 Value="CheckRange"/> + <Define3 Value="AVX2"/> + <Define4 Value="AVX"/> + </OtherDefines> + </Other> + </CompilerOptions> + </Item3> + </BuildModes> + <PublishOptions> + <Version Value="2"/> + </PublishOptions> + <RunParams> + <FormatVersion Value="2"/> + <Modes Count="1"> + <Mode0 Name="default"/> + </Modes> + </RunParams> + <RequiredPackages Count="1"> + <Item1> + <PackageName Value="multithreadprocslaz"/> + </Item1> + </RequiredPackages> + <Units Count="1"> + <Unit0> + <Filename Value="SimpleNLP.lpr"/> + <IsPartOfProject Value="True"/> + <UnitName Value="smallLanguageModelConvFromFile"/> + </Unit0> + </Units> + </ProjectOptions> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\SimpleNLP"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\..\experiments\neural;$(ProjOutDir)"/> + <OtherUnitFiles Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <CodeGeneration> + <Optimizations> + <OptimizationLevel Value="3"/> + </Optimizations> + </CodeGeneration> + <Linking> + <Debugging> + <GenerateDebugInfo Value="False"/> + </Debugging> + 
</Linking> + <Other> + <CustomOptions Value="-dAVX +-dRelease"/> + <OtherDefines Count="2"> + <Define0 Value="AVX"/> + <Define1 Value="Release"/> + </OtherDefines> + </Other> + </CompilerOptions> + <Debugging> + <Exceptions Count="3"> + <Item1> + <Name Value="EAbort"/> + </Item1> + <Item2> + <Name Value="ECodetoolError"/> + </Item2> + <Item3> + <Name Value="EFOpenError"/> + </Item3> + </Exceptions> + </Debugging> +</CONFIG> diff --git a/examples/SimpleNLP/SimpleNLP.lpr b/examples/SimpleNLP/SimpleNLP.lpr new file mode 100644 index 00000000..9a02285c --- /dev/null +++ b/examples/SimpleNLP/SimpleNLP.lpr @@ -0,0 +1,209 @@ +program smallLanguageModelConvFromFile; +(* +Copyright (C) 2023 Joao Paulo Schwarz Schuler + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; either version 2 of the License, or +any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along +with this program; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +*) + +{$mode objfpc}{$H+} + +uses {$IFDEF UNIX} {$IFDEF UseCThreads} + cthreads, {$ENDIF} {$ENDIF} + Classes, + neuralnetwork, + neuralvolume, + neuralfit, + neuralthread, + CustApp, + Math; + +const + csContextLen = 81; + csTrainingFileName = 'tinystories.txt'; + csVocabSize = 128; // Character based vocabulary/dictionary. + csMinSampleSize = 3; // Minimum of 3 characters. 
+

type

  { TTestFitLoading }

  // Demo application: trains a character-level convolutional language model
  // from a plain-text dataset, one sample per line.
  // NOTE(review): TNNet, TNeuralDataLoadingFit, TNNetSamplerBase, TNNetVolume
  // and the cs* constants (csTrainingFileName, csMinSampleSize, csContextLen,
  // csVocabSize) are declared outside this chunk - in the neural-api units and
  // earlier in this file.
  TTestFitLoading = class(TCustomApplication)
  protected
    FDataset: TStringList;          // one training sample (line of text) per entry
    FDatasetSize: integer;          // cached FDataset.Count
    FNN: TNNet;                     // the neural network being trained
    NFit: TNeuralDataLoadingFit;    // training driver (fit loop with data-loading callbacks)
    FSampler: TNNetSamplerBase;     // sampler used when generating text in OnAfterEpoch
    FMaxPredictCharPos: integer;    // current max cut position for training samples; adapted per epoch
    procedure LoadDataset;
    procedure DoRun; override;
  public
    procedure OnAfterEpoch(Sender: TObject);
    procedure GetTrainingPair(Idx: integer; ThreadId: integer; pInput, pOutput: TNNetVolume);
    procedure GetValidationPair(Idx: integer; ThreadId: integer; pInput, pOutput: TNNetVolume);
    procedure GetTestPair(Idx: integer; ThreadId: integer; pInput, pOutput: TNNetVolume);
  end;

  // Loads the text file into FDataset, drops lines shorter than
  // csMinSampleSize, lowercases the rest and appends a chr(1) terminator.
  procedure TTestFitLoading.LoadDataset;
  var
    RowCnt: integer;
  begin
    FDataset.LoadFromFile(csTrainingFileName);
    FDatasetSize := FDataset.Count;
    // Iterate downwards so Delete() does not shift rows we have not visited yet.
    for RowCnt := FDatasetSize-1 downto 0 do
    begin
      // removes too short strings
      if Length(FDataset[RowCnt])<csMinSampleSize then FDataset.Delete(RowCnt);
    end;
    FDatasetSize := FDataset.Count;
    for RowCnt := FDatasetSize-1 downto 0 do
    begin
      // Lowercases each remaining sample and appends chr(1) - presumably an
      // end-of-sample marker the model can learn to predict; TODO confirm.
      // (The original comment here repeated "removes too short strings",
      // which described the previous loop, not this one.)
      FDataset[RowCnt] := LowerCase(FDataset[RowCnt]) + chr(1);
    end;
    WriteLn('Loaded dataset with ', FDatasetSize, ' rows');
  end;

  // Application entry point: loads the data, builds the convolutional
  // network, runs the training loop, then frees everything it created.
  procedure TTestFitLoading.DoRun;
  begin
    FDataset := TStringList.Create();
    LoadDataset();
    FNN := TNNet.Create();
    NFit := TNeuralDataLoadingFit.Create();
    FMaxPredictCharPos := csMinSampleSize;
    // Top-P (nucleus) sampling with p=0.4 for the generated-text previews.
    FSampler := TNNetSamplerTopP.Create(0.4);
    // Character-level convolutional model: one-hot input of csContextLen
    // characters x csVocabSize depth, ending in a softmax over the vocabulary.
    FNN.AddLayer([
      TNNetInput.Create(csContextLen, 1, csVocabSize),
      TNNetPointwiseConv.Create(32,1),
      TNNetPadXY.Create(1,0),
      TNNetConvolutionReLU.Create(64,3,0,1,1),
      TNNetMaxPool.Create(3),
      TNNetPadXY.Create(1,0),
      TNNetConvolutionReLU.Create(128*3,3,0,1,1),
      TNNetPointwiseConvReLU.Create(1024,0),
      TNNetMaxPoolWithPosition.Create(27,27,0,1,0),
      TNNetPointwiseConvReLU.Create(1024),
      TNNetPointwiseConvReLU.Create(128),
      TNNetFullConnectLinear.Create(csVocabSize),
      TNNetSoftMax.Create()
    ]);
    DebugThreadCount();
+
    FNN.DebugStructure;

    WriteLn('Computing...');
    NFit.MaxThreadNum := 32;
    NFit.LogEveryBatches := 100;
    NFit.InitialLearningRate := 0.0001;
    NFit.LearningRateDecay := 0;
    NFit.L2Decay := 0;
    NFit.EnableClassComparison();
    NFit.EnableDefaultLoss();
    NFit.AvgWeightEpochCount := 1;
    NFit.OnAfterEpoch := @OnAfterEpoch;
    // FitLoading pulls samples on demand through the three Get*Pair callbacks
    // below instead of holding all volumes in memory.
    NFit.FitLoading(
      FNN,
      {TrainingVolumesCount=}32000*3,
      {ValidationVolumesCount=}32000*3 div 20,
      {TestVolumesCount=}32000*3 div 20,
      {batchsize=}320,
      {epochs=}500,
      @GetTrainingPair, @GetValidationPair, @GetTestPair
    );
    FNN.DebugWeights();
    OnAfterEpoch(Self);
    // Free in reverse order of creation; NFit does not own FNN.
    FSampler.Free;
    NFit.Free;
    FNN.Free;
    FDataset.Free;
    Terminate;
  end;

  // After each epoch: prints sample generations from fixed prompts, then
  // adapts FMaxPredictCharPos - shrink the training context while accuracy
  // is below 50%, grow it (up to csContextLen) once the model does better.
  procedure TTestFitLoading.OnAfterEpoch(Sender: TObject);
  begin
    WriteLn('Testing.');
    WriteLn(GenerateStringFromChars(NFit.NN, 'once', FSampler),'.');
    WriteLn(GenerateStringFromChars(NFit.NN, 'one ', FSampler),'.');
    WriteLn(GenerateStringFromChars(NFit.NN, 'once upon ', FSampler),'.');
    if NFit.TrainingAccuracy < 0.5
    then FMaxPredictCharPos := Max(FMaxPredictCharPos-1, csMinSampleSize)
    else FMaxPredictCharPos := Min(FMaxPredictCharPos+1, csContextLen);
    WriteLn('Max prediction pos is: ', FMaxPredictCharPos);
  end;

  // Produces one random training pair: the input is a random-length prefix of
  // a random dataset row (one-hot encoded, reversed), the target class is the
  // character immediately after that prefix.
  // Called from multiple worker threads; Idx and ThreadId are unused here.
  procedure TTestFitLoading.GetTrainingPair(Idx: integer; ThreadId: integer;
    pInput, pOutput: TNNetVolume);
  var
    SampleId: integer;
    SampleLen: integer;
    SampleCutPosition: integer;
    ExpectedTokenChar: char;
    ExpectedTokenInt: integer;
  begin
    // Make sure that expected input and output have the proper sizes.
+
    if FNN.GetFirstLayer().Output.Size <> pInput.Size then pInput.ReSize(FNN.GetFirstLayer().Output);
    if FNN.GetLastLayer().Output.Size <> pOutput.Size then pOutput.ReSize(FNN.GetLastLayer().Output);
    // Get the input sample
    SampleId := Random(FDatasetSize);
    SampleLen := Min(Length(FDataset[SampleId]), pInput.SizeX);
    SampleLen := Min(FMaxPredictCharPos, SampleLen);
    // Random(n) yields 0..n-1, so the cut position lands in
    // [csMinSampleSize, SampleLen-1] and SampleCutPosition+1 stays a valid
    // 1-based string index.
    SampleCutPosition := Random(SampleLen-csMinSampleSize)+csMinSampleSize; // -1
    // The expected token is the next character in the string
    ExpectedTokenChar := FDataset[SampleId][SampleCutPosition+1];
    // Clamp the class id to the vocabulary size (input depth).
    ExpectedTokenInt := Min(Ord(ExpectedTokenChar),pInput.Depth-1);
    // Encode the input and output volumes
    pInput.OneHotEncodingReversed(copy(FDataset[SampleId], 1, SampleCutPosition));
    pOutput.SetClassForSoftMax(ExpectedTokenInt);
    pOutput.Tag := ExpectedTokenInt;
  end;

  // Deterministic counterpart of GetTrainingPair: the row is Idx itself and
  // the cut position is derived from Idx, so validation pairs are
  // reproducible across epochs.
  procedure TTestFitLoading.GetValidationPair(Idx: integer; ThreadId: integer;
    pInput, pOutput: TNNetVolume);
  var
    SampleId: integer;
    SampleLen: integer;
    SampleCutPosition: integer;
    ExpectedTokenChar: char;
    ExpectedTokenInt: integer;
  begin
    // Make sure that expected input and output have the proper sizes.
+
    if FNN.GetFirstLayer().Output.Size <> pInput.Size then pInput.ReSize(FNN.GetFirstLayer().Output);
    if FNN.GetLastLayer().Output.Size <> pOutput.Size then pOutput.ReSize(FNN.GetLastLayer().Output);
    // Get the input sample
    SampleId := Idx;
    SampleLen := Min(Length(FDataset[SampleId]), pInput.SizeX);
    // Idx mod (1+SampleLen-csMinSampleSize) spans 0..SampleLen-csMinSampleSize,
    // putting the cut position in [csMinSampleSize-1, SampleLen-1].
    SampleCutPosition := (Idx mod (1+SampleLen-csMinSampleSize))+csMinSampleSize-1;
    // The expected token is the next character in the string
    ExpectedTokenChar := FDataset[SampleId][SampleCutPosition+1];
    ExpectedTokenInt := Min(Ord(ExpectedTokenChar),pInput.Depth-1);
    // Encode the input and output volumes
    pInput.OneHotEncodingReversed(copy(FDataset[SampleId], 1, SampleCutPosition));
    pOutput.SetClassForSoftMax(ExpectedTokenInt);
    pOutput.Tag := ExpectedTokenInt;
  end;

  // Test pairs reuse the deterministic validation sampling.
  procedure TTestFitLoading.GetTestPair(Idx: integer; ThreadId: integer;
    pInput, pOutput: TNNetVolume);
  begin
    GetValidationPair(Idx, ThreadId, pInput, pOutput);
  end;

var
  Application: TTestFitLoading;
begin
  Application := TTestFitLoading.Create(nil);
  // NOTE(review): 'Covolutional' looks like a typo for 'Convolutional';
  // left unchanged here because the title is a runtime string.
  Application.Title:='Nano Covolutional Based NLP Trained from File';
  Application.Run;
  Application.Free;
end.
diff --git a/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.ipynb b/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.ipynb index 9339281f..de2ff831 100644 --- a/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.ipynb +++ b/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.ipynb @@ -1,7494 +1,409 @@ { - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# This is a simple plant leaf disease classifier inspired from Data from: \n", - "# Identification of Plant Leaf Diseases Using a 9-layer Deep Convolutional Neural Network\n", - "# https://data.mendeley.com/datasets/tywbtsjrjv/1\n", - "\n", - "# This source code required the CAI Neural API found at:\n", - "# https://github.com/joaopauloschuler/neural-api\n", - "\n", - "# To be able to run this code, you'll need at least 32GB of RAM.\n", - "\n", - "has_plant_leaf_disease = True\n", - "has_tiny_imagenet_200 = False" - ] - }, - { - "cell_type": "code", - "execution_count": 1, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Reading package lists... Done\n", - "Building dependency tree \n", - "Reading state information... 
Done\n", - "unzip is already the newest version (6.0-21ubuntu1).\n", - "git is already the newest version (1:2.17.1-1ubuntu0.5).\n", - "The following additional packages will be installed:\n", - " adwaita-icon-theme autoconf automake autopoint autotools-dev bsdmainutils\n", - " debhelper dh-autoreconf dh-strip-nondeterminism file fontconfig\n", - " fontconfig-config fonts-dejavu-core fp-compiler-3.0.4 fp-docs-3.0.4\n", - " fp-ide-3.0.4 fp-units-base-3.0.4 fp-units-db-3.0.4 fp-units-fcl-3.0.4\n", - " fp-units-fv-3.0.4 fp-units-gfx-3.0.4 fp-units-gtk2-3.0.4 fp-units-math-3.0.4\n", - " fp-units-misc-3.0.4 fp-units-multimedia-3.0.4 fp-units-net-3.0.4\n", - " fp-units-rtl-3.0.4 fp-utils-3.0.4 fpc-3.0.4 fpc-source-3.0.4 gdb gdbserver\n", - " gettext gettext-base gir1.2-atk-1.0 gir1.2-freedesktop gir1.2-gdkpixbuf-2.0\n", - " gir1.2-gtk-2.0 gir1.2-harfbuzz-0.0 gir1.2-pango-1.0 groff-base\n", - " gtk-update-icon-cache hicolor-icon-theme humanity-icon-theme icu-devtools\n", - " intltool-debian lazarus-1.8 lazarus-doc-1.8 lazarus-ide lazarus-ide-1.8\n", - " lazarus-ide-gtk2-1.8 lazarus-src-1.8 lcl-1.8 lcl-gtk2-1.8 lcl-nogui-1.8\n", - " lcl-units-1.8 lcl-utils-1.8 liba52-0.7.4 liba52-0.7.4-dev libapr1\n", - " libaprutil1 libarchive-cpio-perl libarchive-zip-perl libasound2\n", - " libasound2-data libasound2-dev libasyncns0 libatk1.0-0 libatk1.0-data\n", - " libatk1.0-dev libavahi-client3 libavahi-common-data libavahi-common3\n", - " libbabeltrace1 libc6-dbg libcaca-dev libcaca0 libcairo-gobject2\n", - " libcairo-script-interpreter2 libcairo2 libcairo2-dev libcdt5 libcgraph6\n", - " libcroco3 libcups2 libdatrie1 libdca-dev libdca0 libdrm-amdgpu1\n", - " libdrm-common libdrm-dev libdrm-intel1 libdrm-nouveau2 libdrm-radeon1\n", - " libdrm2 libdts-dev libdw1 libegl-mesa0 libegl1 libelf1\n", - " libfile-stripnondeterminism-perl libflac-dev libflac8 libfluidsynth1\n", - " libfontconfig1 libfontconfig1-dev libforms-dev libforms2 libgail-common\n", - " libgail18 libgbm1 libgd-dev 
libgd3 libgdk-pixbuf2.0-0 libgdk-pixbuf2.0-bin\n", - " libgdk-pixbuf2.0-common libgdk-pixbuf2.0-dev libgl1 libgl1-mesa-dev\n", - " libgl1-mesa-dri libglapi-mesa libgles1 libgles2 libglib2.0-0 libglib2.0-bin\n", - " libglib2.0-data libglib2.0-dev libglib2.0-dev-bin libglu1-mesa\n", - " libglu1-mesa-dev libglvnd-core-dev libglvnd-dev libglvnd0 libglx-mesa0\n", - " libglx0 libgmp-dev libgmpxx4ldbl libgraphite2-3 libgraphite2-dev\n", - " libgraphviz-dev libgtk2.0-0 libgtk2.0-bin libgtk2.0-common libgtk2.0-dev\n", - " libgts-0.7-5 libgts-bin libgvc6 libgvc6-plugins-gtk libgvpr2 libharfbuzz-dev\n", - " libharfbuzz-gobject0 libharfbuzz-icu0 libharfbuzz0b libice-dev libice6\n", - " libicu-dev libicu-le-hb-dev libicu-le-hb0 libicu60 libiculx60 libidn11\n", - " libjack-jackd2-0 libjbig-dev libjbig0 liblab-gamut1 libllvm9 libltdl-dev\n", - " libltdl7 liblzma-dev liblzo2-2 libmad0 libmad0-dev libmagic-mgc libmagic1\n", - " libmail-sendmail-perl libmikmod-config libmikmod-dev libmikmod3\n", - " libmodplug-dev libmodplug1 libogg-dev libogg0 libopenal-data libopenal1\n", - " libopengl0 libpango-1.0-0 libpango1.0-dev libpangocairo-1.0-0\n", - " libpangoft2-1.0-0 libpangoxft-1.0-0 libpathplan4 libpciaccess0 libpcre16-3\n", - " libpcre3-dev libpcre32-3 libpcrecpp0v5 libpipeline1 libpixman-1-0\n", - " libpixman-1-dev libproxy-tools libproxy1v5 libpthread-stubs0-dev\n", - " libpulse-dev libpulse-mainloop-glib0 libpulse0 libpython-stdlib\n", - " libpython2.7-minimal libpython2.7-stdlib librsvg2-2 librsvg2-common\n", - " libsamplerate0 libsdl-mixer1.2 libsdl-mixer1.2-dev libsdl1.2-dev\n", - " libsdl1.2debian libsdl2-2.0-0 libsensors4 libserf-1-1 libsigsegv2 libslang2\n", - " libslang2-dev libsm-dev libsm6 libsndfile1 libsndio6.1 libsvn1\n", - " libsys-hostname-long-perl libthai-data libthai0 libtiff-dev libtiff5\n", - " libtiff5-dev libtiffxx5 libtimedate-perl libtool libvlc-bin libvlc-dev\n", - " libvlc5 libvlccore9 libvorbis-dev libvorbis0a libvorbisenc2 libvorbisfile3\n", - " 
libvpx-dev libvpx5 libwayland-client0 libwayland-cursor0 libwayland-egl1\n", - " libwayland-egl1-mesa libwayland-server0 libwebp6 libx11-6 libx11-data\n", - " libx11-dev libx11-doc libx11-xcb-dev libx11-xcb1 libxau-dev libxau6\n", - " libxcb-dri2-0 libxcb-dri2-0-dev libxcb-dri3-0 libxcb-dri3-dev libxcb-glx0\n", - " libxcb-glx0-dev libxcb-present-dev libxcb-present0 libxcb-randr0\n", - " libxcb-randr0-dev libxcb-render0 libxcb-render0-dev libxcb-shape0\n", - " libxcb-shape0-dev libxcb-shm0 libxcb-shm0-dev libxcb-sync-dev libxcb-sync1\n", - " libxcb-xfixes0 libxcb-xfixes0-dev libxcb1 libxcb1-dev libxcomposite-dev\n", - " libxcomposite1 libxcursor-dev libxcursor1 libxdamage-dev libxdamage1\n", - " libxdmcp-dev libxdmcp6 libxdot4 libxext-dev libxext6 libxfixes-dev\n", - " libxfixes3 libxft-dev libxft2 libxi-dev libxi6 libxinerama-dev libxinerama1\n", - " libxkbcommon0 libxml2 libxml2-utils libxpm-dev libxpm4 libxrandr-dev\n", - " libxrandr2 libxrender-dev libxrender1 libxshmfence-dev libxshmfence1 libxss1\n", - " libxt-dev libxt6 libxxf86dga-dev libxxf86dga1 libxxf86vm-dev libxxf86vm1 m4\n", - " man-db mesa-common-dev po-debconf python python-minimal python2.7\n", - " python2.7-minimal shared-mime-info timgm6mb-soundfont ubuntu-mono x11-common\n", - " x11proto-composite-dev x11proto-core-dev x11proto-damage-dev x11proto-dev\n", - " x11proto-fixes-dev x11proto-input-dev x11proto-randr-dev x11proto-xext-dev\n", - " x11proto-xf86dga-dev x11proto-xf86vidmode-dev x11proto-xinerama-dev xkb-data\n", - " xorg-sgml-doctools xtrans-dev\n", - "Suggested packages:\n", - " autoconf-archive gnu-standards autoconf-doc wamerican | wordlist whois\n", - " vacation dh-make dwz uuid-dev firebird-dev freetds-dev libgdbm-dev\n", - " default-libmysqlclient-dev libpq-dev libsqlite3-dev pxlib-dev unixodbc-dev\n", - " gdb-doc gettext-doc libasprintf-dev libgettextpo-dev groff\n", - " libasound2-plugins alsa-utils libasound2-doc libcairo2-doc cups-common\n", - " libforms-bin libforms-doc 
libgd-tools libglib2.0-doc gmp-doc libgmp10-doc\n", - " libmpfr-dev libgraphite2-utils gvfs libgtk2.0-doc libice-doc icu-doc jackd2\n", - " libtool-doc liblzma-doc libportaudio2 imagemagick libpango1.0-doc pciutils\n", - " pulseaudio librsvg2-bin lm-sensors libsm-doc sndiod gfortran\n", - " | fortran95-compiler gcj-jdk libxcb-doc libxext-doc libxt-doc m4-doc\n", - " apparmor www-browser libmail-box-perl python-doc python-tk python2.7-doc\n", - " binfmt-support db5.3-util libapache2-mod-svn subversion-tools\n", - " fluid-soundfont-gm fluidsynth timidity musescore\n", - "Recommended packages:\n", - " libggi2-dev xdg-user-dirs\n", - "The following NEW packages will be installed:\n", - " adwaita-icon-theme autoconf automake autopoint autotools-dev bsdmainutils\n", - " debhelper dh-autoreconf dh-strip-nondeterminism file fontconfig\n", - " fontconfig-config fonts-dejavu-core fp-compiler-3.0.4 fp-docs-3.0.4\n", - " fp-ide-3.0.4 fp-units-base-3.0.4 fp-units-db-3.0.4 fp-units-fcl-3.0.4\n", - " fp-units-fv-3.0.4 fp-units-gfx-3.0.4 fp-units-gtk2-3.0.4 fp-units-math-3.0.4\n", - " fp-units-misc-3.0.4 fp-units-multimedia-3.0.4 fp-units-net-3.0.4\n", - " fp-units-rtl-3.0.4 fp-utils-3.0.4 fpc fpc-3.0.4 fpc-source fpc-source-3.0.4\n", - " gdb gdbserver gettext gettext-base gir1.2-atk-1.0 gir1.2-freedesktop\n", - " gir1.2-gdkpixbuf-2.0 gir1.2-gtk-2.0 gir1.2-harfbuzz-0.0 gir1.2-pango-1.0\n", - " groff-base gtk-update-icon-cache hicolor-icon-theme humanity-icon-theme\n", - " icu-devtools intltool-debian lazarus lazarus-1.8 lazarus-doc-1.8 lazarus-ide\n", - " lazarus-ide-1.8 lazarus-ide-gtk2-1.8 lazarus-src-1.8 lcl-1.8 lcl-gtk2-1.8\n", - " lcl-nogui-1.8 lcl-units-1.8 lcl-utils-1.8 liba52-0.7.4 liba52-0.7.4-dev\n", - " libapr1 libaprutil1 libarchive-cpio-perl libarchive-zip-perl libasound2\n", - " libasound2-data libasound2-dev libasyncns0 libatk1.0-0 libatk1.0-data\n", - " libatk1.0-dev libavahi-client3 libavahi-common-data libavahi-common3\n", - " libbabeltrace1 libc6-dbg libcaca-dev 
libcaca0 libcairo-gobject2\n", - " libcairo-script-interpreter2 libcairo2 libcairo2-dev libcdt5 libcgraph6\n", - " libcroco3 libcups2 libdatrie1 libdca-dev libdca0 libdrm-amdgpu1\n", - " libdrm-common libdrm-dev libdrm-intel1 libdrm-nouveau2 libdrm-radeon1\n", - " libdrm2 libdts-dev libdw1 libegl-mesa0 libegl1 libelf1\n", - " libfile-stripnondeterminism-perl libflac-dev libflac8 libfluidsynth1\n", - " libfontconfig1 libfontconfig1-dev libforms-dev libforms2 libgail-common\n", - " libgail18 libgbm1 libgd-dev libgd3 libgdk-pixbuf2.0-0 libgdk-pixbuf2.0-bin\n", - " libgdk-pixbuf2.0-common libgdk-pixbuf2.0-dev libgl1 libgl1-mesa-dev\n", - " libgl1-mesa-dri libglapi-mesa libgles1 libgles2 libglib2.0-bin\n", - " libglib2.0-data libglib2.0-dev libglib2.0-dev-bin libglu1-mesa\n", - " libglu1-mesa-dev libglvnd-core-dev libglvnd-dev libglvnd0 libglx-mesa0\n", - " libglx0 libgmp-dev libgmpxx4ldbl libgraphite2-3 libgraphite2-dev\n", - " libgraphviz-dev libgtk2.0-0 libgtk2.0-bin libgtk2.0-common libgtk2.0-dev\n", - " libgts-0.7-5 libgts-bin libgvc6 libgvc6-plugins-gtk libgvpr2 libharfbuzz-dev\n", - " libharfbuzz-gobject0 libharfbuzz-icu0 libharfbuzz0b libice-dev libice6\n", - " libicu-dev libicu-le-hb-dev libicu-le-hb0 libicu60 libiculx60 libidn11\n", - " libjack-jackd2-0 libjbig-dev libjbig0 liblab-gamut1 libllvm9 libltdl-dev\n", - " libltdl7 liblzma-dev liblzo2-2 libmad0 libmad0-dev libmagic-mgc libmagic1\n", - " libmail-sendmail-perl libmikmod-config libmikmod-dev libmikmod3\n", - " libmodplug-dev libmodplug1 libogg-dev libogg0 libopenal-data libopenal1\n", - " libopengl0 libpango-1.0-0 libpango1.0-dev libpangocairo-1.0-0\n", - " libpangoft2-1.0-0 libpangoxft-1.0-0 libpathplan4 libpciaccess0 libpcre16-3\n", - " libpcre3-dev libpcre32-3 libpcrecpp0v5 libpipeline1 libpixman-1-0\n", - " libpixman-1-dev libproxy-tools libproxy1v5 libpthread-stubs0-dev\n", - " libpulse-dev libpulse-mainloop-glib0 libpulse0 libpython-stdlib\n", - " libpython2.7-minimal libpython2.7-stdlib 
librsvg2-2 librsvg2-common\n", - " libsamplerate0 libsdl-mixer1.2 libsdl-mixer1.2-dev libsdl1.2-dev\n", - " libsdl1.2debian libsdl2-2.0-0 libsensors4 libserf-1-1 libsigsegv2 libslang2\n", - " libslang2-dev libsm-dev libsm6 libsndfile1 libsndio6.1 libsvn1\n", - " libsys-hostname-long-perl libthai-data libthai0 libtiff-dev libtiff5\n", - " libtiff5-dev libtiffxx5 libtimedate-perl libtool libvlc-bin libvlc-dev\n", - " libvlc5 libvlccore9 libvorbis-dev libvorbis0a libvorbisenc2 libvorbisfile3\n", - " libvpx-dev libvpx5 libwayland-client0 libwayland-cursor0 libwayland-egl1\n", - " libwayland-egl1-mesa libwayland-server0 libwebp6 libx11-6 libx11-data\n", - " libx11-dev libx11-doc libx11-xcb-dev libx11-xcb1 libxau-dev libxau6\n", - " libxcb-dri2-0 libxcb-dri2-0-dev libxcb-dri3-0 libxcb-dri3-dev libxcb-glx0\n", - " libxcb-glx0-dev libxcb-present-dev libxcb-present0 libxcb-randr0\n", - " libxcb-randr0-dev libxcb-render0 libxcb-render0-dev libxcb-shape0\n", - " libxcb-shape0-dev libxcb-shm0 libxcb-shm0-dev libxcb-sync-dev libxcb-sync1\n", - " libxcb-xfixes0 libxcb-xfixes0-dev libxcb1 libxcb1-dev libxcomposite-dev\n", - " libxcomposite1 libxcursor-dev libxcursor1 libxdamage-dev libxdamage1\n", - " libxdmcp-dev libxdmcp6 libxdot4 libxext-dev libxext6 libxfixes-dev\n", - " libxfixes3 libxft-dev libxft2 libxi-dev libxi6 libxinerama-dev libxinerama1\n", - " libxkbcommon0 libxml2 libxml2-utils libxpm-dev libxpm4 libxrandr-dev\n", - " libxrandr2 libxrender-dev libxrender1 libxshmfence-dev libxshmfence1 libxss1\n", - " libxt-dev libxt6 libxxf86dga-dev libxxf86dga1 libxxf86vm-dev libxxf86vm1 m4\n", - " man-db mesa-common-dev po-debconf python python-minimal python2.7\n", - " python2.7-minimal shared-mime-info subversion timgm6mb-soundfont ubuntu-mono\n", - " x11-common x11proto-composite-dev x11proto-core-dev x11proto-damage-dev\n", - " x11proto-dev x11proto-fixes-dev x11proto-input-dev x11proto-randr-dev\n", - " x11proto-xext-dev x11proto-xf86dga-dev x11proto-xf86vidmode-dev\n", - " 
x11proto-xinerama-dev xkb-data xorg-sgml-doctools xtrans-dev zip\n", - "The following packages will be upgraded:\n", - " libglib2.0-0\n", - "1 upgraded, 348 newly installed, 0 to remove and 37 not upgraded.\n", - "Need to get 243 MB of archives.\n", - "After this operation, 1802 MB of additional disk space will be used.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Get:1 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxau6 amd64 1:1.0.8-1 [8376 B]\n", - "Get:2 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxdmcp6 amd64 1:1.1.2-3 [10.7 kB]\n", - "Get:3 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb1 amd64 1.13-2~ubuntu18.04 [45.5 kB]\n", - "Get:4 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libx11-data all 2:1.6.4-3ubuntu0.2 [113 kB]\n", - "Get:5 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libx11-6 amd64 2:1.6.4-3ubuntu0.2 [569 kB]\n", - "Get:6 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxext6 amd64 2:1.3.3-1 [29.4 kB]\n", - "Get:7 http://archive.ubuntu.com/ubuntu bionic/main amd64 bsdmainutils amd64 11.1.2ubuntu1 [181 kB]\n", - "Get:8 http://archive.ubuntu.com/ubuntu bionic/main amd64 groff-base amd64 1.22.3-10 [1153 kB]\n", - "Get:9 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpipeline1 amd64 1.5.0-1 [25.3 kB]\n", - "Get:10 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 man-db amd64 2.8.3-2ubuntu0.1 [1019 kB]\n", - "Get:11 http://archive.ubuntu.com/ubuntu bionic/main amd64 fonts-dejavu-core all 2.37-1 [1041 kB]\n", - "Get:12 http://archive.ubuntu.com/ubuntu bionic/main amd64 fontconfig-config all 2.12.6-0ubuntu2 [55.8 kB]\n", - "Get:13 http://archive.ubuntu.com/ubuntu bionic/main amd64 libfontconfig1 amd64 2.12.6-0ubuntu2 [137 kB]\n", - "Get:14 http://archive.ubuntu.com/ubuntu bionic/main amd64 fontconfig amd64 2.12.6-0ubuntu2 [169 kB]\n", - "Get:15 http://archive.ubuntu.com/ubuntu bionic/main amd64 liblzo2-2 amd64 2.08-1.2 [48.7 kB]\n", - 
"Get:16 http://archive.ubuntu.com/ubuntu bionic/main amd64 libogg0 amd64 1.3.2-1 [17.2 kB]\n", - "Get:17 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 x11-common all 1:7.7+19ubuntu7.1 [22.5 kB]\n", - "Get:18 http://archive.ubuntu.com/ubuntu bionic/main amd64 libice6 amd64 2:1.0.9-2 [40.2 kB]\n", - "Get:19 http://archive.ubuntu.com/ubuntu bionic/main amd64 libsm6 amd64 2:1.2.2-1 [15.8 kB]\n", - "Get:20 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxrender1 amd64 1:0.9.10-1 [18.7 kB]\n", - "Get:21 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxft2 amd64 2.3.2-1 [36.1 kB]\n", - "Get:22 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxinerama1 amd64 2:1.1.3-1 [7908 B]\n", - "Get:23 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxss1 amd64 1:1.2.2-1 [8582 B]\n", - "Get:24 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxxf86dga1 amd64 2:1.1.4-1 [13.7 kB]\n", - "Get:25 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxxf86vm1 amd64 1:1.1.4-1 [10.6 kB]\n", - "Get:26 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpython2.7-minimal amd64 2.7.17-1~18.04 [335 kB]\n", - "Get:27 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 python2.7-minimal amd64 2.7.17-1~18.04 [1294 kB]\n", - "Get:28 http://archive.ubuntu.com/ubuntu bionic/main amd64 python-minimal amd64 2.7.15~rc1-1 [28.1 kB]\n", - "Get:29 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpython2.7-stdlib amd64 2.7.17-1~18.04 [1915 kB]\n", - "Get:30 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 python2.7 amd64 2.7.17-1~18.04 [248 kB]\n", - "Get:31 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpython-stdlib amd64 2.7.15~rc1-1 [7620 B]\n", - "Get:32 http://archive.ubuntu.com/ubuntu bionic/main amd64 python amd64 2.7.15~rc1-1 [140 kB]\n", - "Get:33 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libmagic-mgc amd64 1:5.32-2ubuntu0.3 [184 kB]\n", - "Get:34 http://archive.ubuntu.com/ubuntu 
bionic-updates/main amd64 libmagic1 amd64 1:5.32-2ubuntu0.3 [68.7 kB]\n", - "Get:35 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 file amd64 1:5.32-2ubuntu0.3 [22.1 kB]\n", - "Get:36 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libelf1 amd64 0.170-0.4ubuntu0.1 [44.8 kB]\n", - "Get:37 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglib2.0-0 amd64 2.56.4-0ubuntu0.18.04.6 [1171 kB]\n", - "Get:38 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglib2.0-data all 2.56.4-0ubuntu0.18.04.6 [4540 B]\n", - "Get:39 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libicu60 amd64 60.2-3ubuntu3.1 [8054 kB]\n", - "Get:40 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libidn11 amd64 1.33-2.1ubuntu1.2 [46.6 kB]\n", - "Get:41 http://archive.ubuntu.com/ubuntu bionic/main amd64 libslang2 amd64 2.3.1a-3ubuntu1 [424 kB]\n", - "Get:42 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxml2 amd64 2.9.4+dfsg1-6.1ubuntu1.3 [663 kB]\n", - "Get:43 http://archive.ubuntu.com/ubuntu bionic/main amd64 shared-mime-info amd64 1.9-2 [426 kB]\n", - "Get:44 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 xkb-data all 2.23.1-1ubuntu1.18.04.1 [325 kB]\n", - "Get:45 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 gettext-base amd64 0.19.8.1-6ubuntu0.3 [113 kB]\n", - "Get:46 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdrm-common all 2.4.99-1ubuntu1~18.04.2 [5328 B]\n", - "Get:47 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdrm2 amd64 2.4.99-1ubuntu1~18.04.2 [31.7 kB]\n", - "Get:48 http://archive.ubuntu.com/ubuntu bionic/main amd64 hicolor-icon-theme all 0.17-2 [9976 B]\n", - "Get:49 http://archive.ubuntu.com/ubuntu bionic/main amd64 libjbig0 amd64 2.1-3.1build1 [26.7 kB]\n", - "Get:50 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libtiff5 amd64 4.0.9-5ubuntu0.3 [153 kB]\n", - "Get:51 http://archive.ubuntu.com/ubuntu bionic/main amd64 
libgdk-pixbuf2.0-common all 2.36.11-2 [4536 B]\n", - "Get:52 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgdk-pixbuf2.0-0 amd64 2.36.11-2 [165 kB]\n", - "Get:53 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 gtk-update-icon-cache amd64 3.22.30-1ubuntu4 [28.3 kB]\n", - "Get:54 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpixman-1-0 amd64 0.34.0-2 [229 kB]\n", - "Get:55 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-render0 amd64 1.13-2~ubuntu18.04 [14.7 kB]\n", - "Get:56 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-shm0 amd64 1.13-2~ubuntu18.04 [5600 B]\n", - "Get:57 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libcairo2 amd64 1.15.10-2ubuntu0.1 [580 kB]\n", - "Get:58 http://archive.ubuntu.com/ubuntu bionic/main amd64 libcroco3 amd64 0.6.12-2 [81.3 kB]\n", - "Get:59 http://archive.ubuntu.com/ubuntu bionic/main amd64 libthai-data all 0.1.27-2 [133 kB]\n", - "Get:60 http://archive.ubuntu.com/ubuntu bionic/main amd64 libdatrie1 amd64 0.2.10-7 [17.8 kB]\n", - "Get:61 http://archive.ubuntu.com/ubuntu bionic/main amd64 libthai0 amd64 0.1.27-2 [18.0 kB]\n", - "Get:62 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpango-1.0-0 amd64 1.40.14-1ubuntu0.1 [153 kB]\n", - "Get:63 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgraphite2-3 amd64 1.3.11-2 [78.7 kB]\n", - "Get:64 http://archive.ubuntu.com/ubuntu bionic/main amd64 libharfbuzz0b amd64 1.7.2-1ubuntu1 [232 kB]\n", - "Get:65 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpangoft2-1.0-0 amd64 1.40.14-1ubuntu0.1 [33.2 kB]\n", - "Get:66 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpangocairo-1.0-0 amd64 1.40.14-1ubuntu0.1 [20.8 kB]\n", - "Get:67 http://archive.ubuntu.com/ubuntu bionic/main amd64 librsvg2-2 amd64 2.40.20-2 [98.6 kB]\n", - "Get:68 http://archive.ubuntu.com/ubuntu bionic/main amd64 librsvg2-common amd64 2.40.20-2 [5124 B]\n", - "Get:69 http://archive.ubuntu.com/ubuntu 
bionic/main amd64 humanity-icon-theme all 0.6.15 [1250 kB]\n", - "Get:70 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 ubuntu-mono all 16.10+18.04.20181005-0ubuntu1 [149 kB]\n", - "Get:71 http://archive.ubuntu.com/ubuntu bionic/main amd64 adwaita-icon-theme all 3.28.0-1ubuntu1 [3306 kB]\n", - "Get:72 http://archive.ubuntu.com/ubuntu bionic/main amd64 libsigsegv2 amd64 2.12-1 [14.7 kB]\n", - "Get:73 http://archive.ubuntu.com/ubuntu bionic/main amd64 m4 amd64 1.4.18-1 [197 kB]\n", - "Get:74 http://archive.ubuntu.com/ubuntu bionic/main amd64 autoconf all 2.69-11 [322 kB]\n", - "Get:75 http://archive.ubuntu.com/ubuntu bionic/main amd64 autotools-dev all 20180224.1 [39.6 kB]\n", - "Get:76 http://archive.ubuntu.com/ubuntu bionic/main amd64 automake all 1:1.15.1-3ubuntu2 [509 kB]\n", - "Get:77 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 autopoint all 0.19.8.1-6ubuntu0.3 [426 kB]\n", - "Get:78 http://archive.ubuntu.com/ubuntu bionic/main amd64 libtool all 2.4.6-2 [194 kB]\n", - "Get:79 http://archive.ubuntu.com/ubuntu bionic/main amd64 dh-autoreconf all 17 [15.8 kB]\n", - "Get:80 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libarchive-zip-perl all 1.60-1ubuntu0.1 [84.6 kB]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Get:81 http://archive.ubuntu.com/ubuntu bionic/main amd64 libfile-stripnondeterminism-perl all 0.040-1.1~build1 [13.8 kB]\n", - "Get:82 http://archive.ubuntu.com/ubuntu bionic/main amd64 libtimedate-perl all 2.3000-2 [37.5 kB]\n", - "Get:83 http://archive.ubuntu.com/ubuntu bionic/main amd64 dh-strip-nondeterminism all 0.040-1.1~build1 [5208 B]\n", - "Get:84 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 gettext amd64 0.19.8.1-6ubuntu0.3 [1293 kB]\n", - "Get:85 http://archive.ubuntu.com/ubuntu bionic/main amd64 intltool-debian all 0.35.0+20060710.4 [24.9 kB]\n", - "Get:86 http://archive.ubuntu.com/ubuntu bionic/main amd64 po-debconf all 1.0.20 [232 kB]\n", - "Get:87 
http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 debhelper all 11.1.6ubuntu2 [902 kB]\n", - "Get:88 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-rtl-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [2039 kB]\n", - "Get:89 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-compiler-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [1949 kB]\n", - "Get:90 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-docs-3.0.4 all 3.0.4+dfsg-18ubuntu2 [882 kB]\n", - "Get:91 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-ide-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [1453 kB]\n", - "Get:92 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-base-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [1050 kB]\n", - "Get:93 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-db-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [745 kB]\n", - "Get:94 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-fcl-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [4798 kB]\n", - "Get:95 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-fv-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [297 kB]\n", - "Get:96 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-gfx-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [1099 kB]\n", - "Get:97 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgtk2.0-common all 2.24.32-1ubuntu1 [125 kB]\n", - "Get:98 http://archive.ubuntu.com/ubuntu bionic/main amd64 libatk1.0-data all 2.28.1-1 [2992 B]\n", - "Get:99 http://archive.ubuntu.com/ubuntu bionic/main amd64 libatk1.0-0 amd64 2.28.1-1 [43.9 kB]\n", - "Get:100 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libavahi-common-data amd64 0.7-3.1ubuntu1.2 [22.1 kB]\n", - "Get:101 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libavahi-common3 amd64 0.7-3.1ubuntu1.2 [21.6 kB]\n", - "Get:102 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libavahi-client3 amd64 0.7-3.1ubuntu1.2 [25.2 kB]\n", - "Get:103 
http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libcups2 amd64 2.2.7-1ubuntu2.7 [211 kB]\n", - "Get:104 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxcomposite1 amd64 1:0.4.4-2 [6988 B]\n", - "Get:105 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxfixes3 amd64 1:5.0.3-1 [10.8 kB]\n", - "Get:106 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxcursor1 amd64 1:1.1.15-1 [19.8 kB]\n", - "Get:107 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxdamage1 amd64 1:1.1.4-3 [6934 B]\n", - "Get:108 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxi6 amd64 2:1.7.9-1 [29.2 kB]\n", - "Get:109 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxrandr2 amd64 2:1.5.1-1 [18.1 kB]\n", - "Get:110 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgtk2.0-0 amd64 2.24.32-1ubuntu1 [1769 kB]\n", - "Get:111 http://archive.ubuntu.com/ubuntu bionic/main amd64 gir1.2-atk-1.0 amd64 2.28.1-1 [17.8 kB]\n", - "Get:112 http://archive.ubuntu.com/ubuntu bionic/main amd64 gir1.2-freedesktop amd64 1.56.1-1 [9080 B]\n", - "Get:113 http://archive.ubuntu.com/ubuntu bionic/main amd64 gir1.2-gdkpixbuf-2.0 amd64 2.36.11-2 [7748 B]\n", - "Get:114 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpangoxft-1.0-0 amd64 1.40.14-1ubuntu0.1 [15.0 kB]\n", - "Get:115 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 gir1.2-pango-1.0 amd64 1.40.14-1ubuntu0.1 [21.6 kB]\n", - "Get:116 http://archive.ubuntu.com/ubuntu bionic/main amd64 gir1.2-gtk-2.0 amd64 2.24.32-1ubuntu1 [172 kB]\n", - "Get:117 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglib2.0-bin amd64 2.56.4-0ubuntu0.18.04.6 [68.8 kB]\n", - "Get:118 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglib2.0-dev-bin amd64 2.56.4-0ubuntu0.18.04.6 [102 kB]\n", - "Get:119 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpcre16-3 amd64 2:8.39-9 [147 kB]\n", - "Get:120 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpcre32-3 amd64 2:8.39-9 [138 kB]\n", - 
"Get:121 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpcrecpp0v5 amd64 2:8.39-9 [15.3 kB]\n", - "Get:122 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpcre3-dev amd64 2:8.39-9 [537 kB]\n", - "Get:123 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglib2.0-dev amd64 2.56.4-0ubuntu0.18.04.6 [1385 kB]\n", - "Get:124 http://archive.ubuntu.com/ubuntu bionic/main amd64 xorg-sgml-doctools all 1:1.11-1 [12.9 kB]\n", - "Get:125 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-dev all 2018.4-4 [251 kB]\n", - "Get:126 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-core-dev all 2018.4-4 [2620 B]\n", - "Get:127 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxau-dev amd64 1:1.0.8-1 [11.1 kB]\n", - "Get:128 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxdmcp-dev amd64 1:1.1.2-3 [25.1 kB]\n", - "Get:129 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-input-dev all 2018.4-4 [2620 B]\n", - "Get:130 http://archive.ubuntu.com/ubuntu bionic/main amd64 xtrans-dev all 1.3.5-1 [70.5 kB]\n", - "Get:131 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpthread-stubs0-dev amd64 0.3-4 [4068 B]\n", - "Get:132 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb1-dev amd64 1.13-2~ubuntu18.04 [80.0 kB]\n", - "Get:133 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libx11-dev amd64 2:1.6.4-3ubuntu0.2 [640 kB]\n", - "Get:134 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgdk-pixbuf2.0-dev amd64 2.36.11-2 [46.8 kB]\n", - "Get:135 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libcairo-gobject2 amd64 1.15.10-2ubuntu0.1 [17.1 kB]\n", - "Get:136 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libcairo-script-interpreter2 amd64 1.15.10-2ubuntu0.1 [53.5 kB]\n", - "Get:137 http://archive.ubuntu.com/ubuntu bionic/main amd64 libfontconfig1-dev amd64 2.12.6-0ubuntu2 [689 kB]\n", - "Get:138 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxrender-dev amd64 
1:0.9.10-1 [24.9 kB]\n", - "Get:139 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-xext-dev all 2018.4-4 [2620 B]\n", - "Get:140 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxext-dev amd64 2:1.3.3-1 [82.1 kB]\n", - "Get:141 http://archive.ubuntu.com/ubuntu bionic/main amd64 libice-dev amd64 2:1.0.9-2 [46.8 kB]\n", - "Get:142 http://archive.ubuntu.com/ubuntu bionic/main amd64 libsm-dev amd64 2:1.2.2-1 [16.2 kB]\n", - "Get:143 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpixman-1-dev amd64 0.34.0-2 [244 kB]\n", - "Get:144 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-render0-dev amd64 1.13-2~ubuntu18.04 [18.4 kB]\n", - "Get:145 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-shm0-dev amd64 1.13-2~ubuntu18.04 [6684 B]\n", - "Get:146 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libcairo2-dev amd64 1.15.10-2ubuntu0.1 [626 kB]\n", - "Get:147 http://archive.ubuntu.com/ubuntu bionic/main amd64 libharfbuzz-icu0 amd64 1.7.2-1ubuntu1 [5604 B]\n", - "Get:148 http://archive.ubuntu.com/ubuntu bionic/main amd64 libharfbuzz-gobject0 amd64 1.7.2-1ubuntu1 [13.4 kB]\n", - "Get:149 http://archive.ubuntu.com/ubuntu bionic/main amd64 gir1.2-harfbuzz-0.0 amd64 1.7.2-1ubuntu1 [18.6 kB]\n", - "Get:150 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgraphite2-dev amd64 1.3.11-2 [14.5 kB]\n", - "Get:151 http://archive.ubuntu.com/ubuntu bionic/main amd64 libicu-le-hb0 amd64 1.0.3+git161113-4 [14.3 kB]\n", - "Get:152 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libiculx60 amd64 60.2-3ubuntu3.1 [19.0 kB]\n", - "Get:153 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 icu-devtools amd64 60.2-3ubuntu3.1 [179 kB]\n", - "Get:154 http://archive.ubuntu.com/ubuntu bionic/main amd64 libicu-le-hb-dev amd64 1.0.3+git161113-4 [29.5 kB]\n", - "Get:155 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libicu-dev amd64 60.2-3ubuntu3.1 [8889 kB]\n" - ] - }, - { - "name": "stdout", - 
"output_type": "stream", - "text": [ - "Get:156 http://archive.ubuntu.com/ubuntu bionic/main amd64 libharfbuzz-dev amd64 1.7.2-1ubuntu1 [302 kB]\n", - "Get:157 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxft-dev amd64 2.3.2-1 [45.7 kB]\n", - "Get:158 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpango1.0-dev amd64 1.40.14-1ubuntu0.1 [288 kB]\n", - "Get:159 http://archive.ubuntu.com/ubuntu bionic/main amd64 libatk1.0-dev amd64 2.28.1-1 [79.9 kB]\n", - "Get:160 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-xinerama-dev all 2018.4-4 [2628 B]\n", - "Get:161 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxinerama-dev amd64 2:1.1.3-1 [8404 B]\n", - "Get:162 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-fixes-dev all 1:2018.4-4 [2620 B]\n", - "Get:163 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxfixes-dev amd64 1:5.0.3-1 [11.0 kB]\n", - "Get:164 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxi-dev amd64 2:1.7.9-1 [186 kB]\n", - "Get:165 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-randr-dev all 2018.4-4 [2620 B]\n", - "Get:166 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxrandr-dev amd64 2:1.5.1-1 [24.0 kB]\n", - "Get:167 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxcursor-dev amd64 1:1.1.15-1 [26.5 kB]\n", - "Get:168 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-composite-dev all 1:2018.4-4 [2620 B]\n", - "Get:169 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxcomposite-dev amd64 1:0.4.4-2 [9136 B]\n", - "Get:170 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-damage-dev all 1:2018.4-4 [2620 B]\n", - "Get:171 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxdamage-dev amd64 1:1.1.4-3 [5028 B]\n", - "Get:172 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxml2-utils amd64 2.9.4+dfsg1-6.1ubuntu1.3 [35.9 kB]\n", - "Get:173 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgtk2.0-dev amd64 2.24.32-1ubuntu1 
[2652 kB]\n", - "Get:174 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-gtk2-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [1013 kB]\n", - "Get:175 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-math-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [344 kB]\n", - "Get:176 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-misc-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [1087 kB]\n", - "Get:177 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-multimedia-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [208 kB]\n", - "Get:178 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-units-net-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [5416 kB]\n", - "Get:179 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fpc-source-3.0.4 all 3.0.4+dfsg-18ubuntu2 [17.0 MB]\n", - "Get:180 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fp-utils-3.0.4 amd64 3.0.4+dfsg-18ubuntu2 [2758 kB]\n", - "Get:181 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fpc-3.0.4 all 3.0.4+dfsg-18ubuntu2 [24.4 kB]\n", - "Get:182 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fpc all 3.0.4+dfsg-18ubuntu2 [23.9 kB]\n", - "Get:183 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 fpc-source all 3.0.4+dfsg-18ubuntu2 [23.9 kB]\n", - "Get:184 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdw1 amd64 0.170-0.4ubuntu0.1 [203 kB]\n", - "Get:185 http://archive.ubuntu.com/ubuntu bionic/main amd64 libbabeltrace1 amd64 1.5.5-1 [154 kB]\n", - "Get:186 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 gdb amd64 8.1-0ubuntu3.2 [2938 kB]\n", - "Get:187 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 gdbserver amd64 8.1-0ubuntu3.2 [282 kB]\n", - "Get:188 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lazarus-ide-1.8 amd64 1.8.2+dfsg-3 [8386 kB]\n", - "Get:189 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lazarus-ide-gtk2-1.8 amd64 1.8.2+dfsg-3 [14.1 MB]\n", - 
"Get:190 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lazarus-ide all 1.8.2+dfsg-3 [28.9 kB]\n", - "Get:191 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lazarus-src-1.8 all 1.8.2+dfsg-3 [15.6 MB]\n", - "Get:192 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lcl-nogui-1.8 amd64 1.8.2+dfsg-3 [6637 kB]\n", - "Get:193 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lcl-gtk2-1.8 amd64 1.8.2+dfsg-3 [8523 kB]\n", - "Get:194 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lcl-units-1.8 amd64 1.8.2+dfsg-3 [14.3 MB]\n", - "Get:195 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lcl-utils-1.8 amd64 1.8.2+dfsg-3 [6694 kB]\n", - "Get:196 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lcl-1.8 amd64 1.8.2+dfsg-3 [28.9 kB]\n", - "Get:197 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lazarus-1.8 all 1.8.2+dfsg-3 [29.4 kB]\n", - "Get:198 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lazarus all 1.8.2+dfsg-3 [28.8 kB]\n", - "Get:199 http://archive.ubuntu.com/ubuntu bionic/universe amd64 lazarus-doc-1.8 all 1.8.2+dfsg-3 [15.1 MB]\n", - "Get:200 http://archive.ubuntu.com/ubuntu bionic/universe amd64 liba52-0.7.4 amd64 0.7.4-19 [35.2 kB]\n", - "Get:201 http://archive.ubuntu.com/ubuntu bionic/universe amd64 liba52-0.7.4-dev amd64 0.7.4-19 [47.5 kB]\n", - "Get:202 http://archive.ubuntu.com/ubuntu bionic/main amd64 libapr1 amd64 1.6.3-2 [90.9 kB]\n", - "Get:203 http://archive.ubuntu.com/ubuntu bionic/main amd64 libaprutil1 amd64 1.6.1-2 [84.4 kB]\n", - "Get:204 http://archive.ubuntu.com/ubuntu bionic/main amd64 libarchive-cpio-perl all 0.10-1 [9644 B]\n", - "Get:205 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libasound2-data all 1.1.3-5ubuntu0.4 [38.0 kB]\n", - "Get:206 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libasound2 amd64 1.1.3-5ubuntu0.4 [361 kB]\n", - "Get:207 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libasound2-dev amd64 1.1.3-5ubuntu0.4 [123 kB]\n", - 
"Get:208 http://archive.ubuntu.com/ubuntu bionic/main amd64 libasyncns0 amd64 0.8-6 [12.1 kB]\n", - "Get:209 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libcaca0 amd64 0.99.beta19-2ubuntu0.18.04.1 [203 kB]\n", - "Get:210 http://archive.ubuntu.com/ubuntu bionic/main amd64 libslang2-dev amd64 2.3.1a-3ubuntu1 [393 kB]\n", - "Get:211 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libcaca-dev amd64 0.99.beta19-2ubuntu0.18.04.1 [747 kB]\n", - "Get:212 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libcdt5 amd64 2.40.1-2 [19.6 kB]\n", - "Get:213 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libcgraph6 amd64 2.40.1-2 [40.8 kB]\n", - "Get:214 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdrm-amdgpu1 amd64 2.4.99-1ubuntu1~18.04.2 [18.2 kB]\n", - "Get:215 http://archive.ubuntu.com/ubuntu bionic/main amd64 libpciaccess0 amd64 0.14-1 [17.9 kB]\n", - "Get:216 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdrm-intel1 amd64 2.4.99-1ubuntu1~18.04.2 [59.9 kB]\n", - "Get:217 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdrm-radeon1 amd64 2.4.99-1ubuntu1~18.04.2 [21.7 kB]\n", - "Get:218 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdrm-nouveau2 amd64 2.4.99-1ubuntu1~18.04.2 [16.5 kB]\n", - "Get:219 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libdrm-dev amd64 2.4.99-1ubuntu1~18.04.2 [125 kB]\n", - "Get:220 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libwayland-server0 amd64 1.16.0-1ubuntu1.1~18.04.3 [29.6 kB]\n", - "Get:221 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libgbm1 amd64 19.2.8-0ubuntu0~18.04.3 [28.1 kB]\n", - "Get:222 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglapi-mesa amd64 19.2.8-0ubuntu0~18.04.3 [26.5 kB]\n", - "Get:223 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libwayland-client0 amd64 1.16.0-1ubuntu1.1~18.04.3 [23.6 kB]\n", - "Get:224 http://archive.ubuntu.com/ubuntu 
bionic-updates/main amd64 libx11-xcb1 amd64 2:1.6.4-3ubuntu0.2 [9376 B]\n", - "Get:225 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-dri2-0 amd64 1.13-2~ubuntu18.04 [6920 B]\n", - "Get:226 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-dri3-0 amd64 1.13-2~ubuntu18.04 [6568 B]\n", - "Get:227 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-present0 amd64 1.13-2~ubuntu18.04 [5552 B]\n", - "Get:228 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-sync1 amd64 1.13-2~ubuntu18.04 [8808 B]\n", - "Get:229 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-xfixes0 amd64 1.13-2~ubuntu18.04 [9352 B]\n", - "Get:230 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxshmfence1 amd64 1.3-1 [5028 B]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Get:231 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libegl-mesa0 amd64 19.2.8-0ubuntu0~18.04.3 [95.1 kB]\n", - "Get:232 http://archive.ubuntu.com/ubuntu bionic/main amd64 libflac8 amd64 1.3.2-1 [213 kB]\n", - "Get:233 http://archive.ubuntu.com/ubuntu bionic/main amd64 libogg-dev amd64 1.3.2-1 [156 kB]\n", - "Get:234 http://archive.ubuntu.com/ubuntu bionic/main amd64 libflac-dev amd64 1.3.2-1 [260 kB]\n", - "Get:235 http://archive.ubuntu.com/ubuntu bionic/main amd64 libsamplerate0 amd64 0.1.9-1 [938 kB]\n", - "Get:236 http://archive.ubuntu.com/ubuntu bionic/main amd64 libjack-jackd2-0 amd64 1.9.12~dfsg-2 [263 kB]\n", - "Get:237 http://archive.ubuntu.com/ubuntu bionic/main amd64 libvorbis0a amd64 1.3.5-4.2 [86.4 kB]\n", - "Get:238 http://archive.ubuntu.com/ubuntu bionic/main amd64 libvorbisenc2 amd64 1.3.5-4.2 [70.7 kB]\n", - "Get:239 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libsndfile1 amd64 1.0.28-4ubuntu0.18.04.1 [170 kB]\n", - "Get:240 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpulse0 amd64 1:11.1-1ubuntu7.4 [265 kB]\n", - "Get:241 http://archive.ubuntu.com/ubuntu 
bionic/universe amd64 libfluidsynth1 amd64 1.1.9-1 [137 kB]\n", - "Get:242 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxpm4 amd64 1:3.5.12-1 [34.0 kB]\n", - "Get:243 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libforms2 amd64 1.2.3-1.3 [327 kB]\n", - "Get:244 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxpm-dev amd64 1:3.5.12-1 [87.4 kB]\n", - "Get:245 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libforms-dev amd64 1.2.3-1.3 [692 kB]\n", - "Get:246 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgail18 amd64 2.24.32-1ubuntu1 [14.2 kB]\n", - "Get:247 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgail-common amd64 2.24.32-1ubuntu1 [112 kB]\n", - "Get:248 http://archive.ubuntu.com/ubuntu bionic/main amd64 libwebp6 amd64 0.6.1-2 [185 kB]\n", - "Get:249 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libgd3 amd64 2.2.5-4ubuntu0.4 [119 kB]\n", - "Get:250 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxt6 amd64 1:1.1.5-1 [160 kB]\n", - "Get:251 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxt-dev amd64 1:1.1.5-1 [395 kB]\n", - "Get:252 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libvpx5 amd64 1.7.0-3ubuntu0.18.04.1 [796 kB]\n", - "Get:253 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libvpx-dev amd64 1.7.0-3ubuntu0.18.04.1 [932 kB]\n", - "Get:254 http://archive.ubuntu.com/ubuntu bionic/main amd64 libjbig-dev amd64 2.1-3.1build1 [25.2 kB]\n", - "Get:255 http://archive.ubuntu.com/ubuntu bionic/main amd64 liblzma-dev amd64 5.2.2-1.3 [145 kB]\n", - "Get:256 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libtiffxx5 amd64 4.0.9-5ubuntu0.3 [5800 B]\n", - "Get:257 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libtiff5-dev amd64 4.0.9-5ubuntu0.3 [274 kB]\n", - "Get:258 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libtiff-dev amd64 4.0.9-5ubuntu0.3 [2260 B]\n", - "Get:259 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 
libgd-dev amd64 2.2.5-4ubuntu0.4 [246 kB]\n", - "Get:260 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgdk-pixbuf2.0-bin amd64 2.36.11-2 [7864 B]\n", - "Get:261 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libllvm9 amd64 1:9-2~ubuntu18.04.2 [14.8 MB]\n", - "Get:262 http://archive.ubuntu.com/ubuntu bionic/main amd64 libsensors4 amd64 1:3.4.0-4 [28.8 kB]\n", - "Get:263 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libgl1-mesa-dri amd64 19.2.8-0ubuntu0~18.04.3 [8811 kB]\n", - "Get:264 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglvnd0 amd64 1.0.0-2ubuntu2.3 [47.0 kB]\n", - "Get:265 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libgles1 amd64 1.0.0-2ubuntu2.3 [11.2 kB]\n", - "Get:266 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-glx0 amd64 1.13-2~ubuntu18.04 [22.1 kB]\n", - "Get:267 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglx-mesa0 amd64 19.2.8-0ubuntu0~18.04.3 [139 kB]\n", - "Get:268 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglx0 amd64 1.0.0-2ubuntu2.3 [28.1 kB]\n", - "Get:269 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libgl1 amd64 1.0.0-2ubuntu2.3 [86.2 kB]\n", - "Get:270 http://archive.ubuntu.com/ubuntu bionic/main amd64 libglu1-mesa amd64 9.0.0-2.1build1 [168 kB]\n", - "Get:271 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 mesa-common-dev amd64 19.2.8-0ubuntu0~18.04.3 [651 kB]\n", - "Get:272 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libglvnd-core-dev amd64 1.0.0-2ubuntu2.3 [12.8 kB]\n", - "Get:273 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libegl1 amd64 1.0.0-2ubuntu2.3 [32.0 kB]\n", - "Get:274 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libgles2 amd64 1.0.0-2ubuntu2.3 [17.3 kB]\n", - "Get:275 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libopengl0 amd64 1.0.0-2ubuntu2.3 [31.3 kB]\n", - "Get:276 http://archive.ubuntu.com/ubuntu bionic-updates/main 
amd64 libglvnd-dev amd64 1.0.0-2ubuntu2.3 [3416 B]\n", - "Get:277 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libx11-xcb-dev amd64 2:1.6.4-3ubuntu0.2 [9756 B]\n", - "Get:278 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-dri3-dev amd64 1.13-2~ubuntu18.04 [7384 B]\n", - "Get:279 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-randr0 amd64 1.13-2~ubuntu18.04 [16.4 kB]\n", - "Get:280 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-randr0-dev amd64 1.13-2~ubuntu18.04 [20.4 kB]\n", - "Get:281 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-shape0 amd64 1.13-2~ubuntu18.04 [5972 B]\n", - "Get:282 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-shape0-dev amd64 1.13-2~ubuntu18.04 [7144 B]\n", - "Get:283 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-xfixes0-dev amd64 1.13-2~ubuntu18.04 [11.7 kB]\n", - "Get:284 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-sync-dev amd64 1.13-2~ubuntu18.04 [10.6 kB]\n", - "Get:285 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-present-dev amd64 1.13-2~ubuntu18.04 [6972 B]\n", - "Get:286 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxshmfence-dev amd64 1.3-1 [3692 B]\n", - "Get:287 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-dri2-0-dev amd64 1.13-2~ubuntu18.04 [8472 B]\n", - "Get:288 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxcb-glx0-dev amd64 1.13-2~ubuntu18.04 [27.9 kB]\n", - "Get:289 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-xf86vidmode-dev all 2018.4-4 [2632 B]\n", - "Get:290 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxxf86vm-dev amd64 1:1.1.4-1 [13.3 kB]\n", - "Get:291 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libgl1-mesa-dev amd64 19.2.8-0ubuntu0~18.04.3 [6180 B]\n", - "Get:292 http://archive.ubuntu.com/ubuntu bionic/main amd64 libglu1-mesa-dev amd64 9.0.0-2.1build1 [206 kB]\n", - 
"Get:293 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgmpxx4ldbl amd64 2:6.1.2+dfsg-2 [8964 B]\n", - "Get:294 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgmp-dev amd64 2:6.1.2+dfsg-2 [316 kB]\n", - "Get:295 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libgts-0.7-5 amd64 0.7.6+darcs121130-4 [150 kB]\n", - "Get:296 http://archive.ubuntu.com/ubuntu bionic/main amd64 libltdl7 amd64 2.4.6-2 [38.8 kB]\n", - "Get:297 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libpathplan4 amd64 2.40.1-2 [22.6 kB]\n", - "Get:298 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libgvc6 amd64 2.40.1-2 [601 kB]\n", - "Get:299 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libgvpr2 amd64 2.40.1-2 [169 kB]\n", - "Get:300 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libxdot4 amd64 2.40.1-2 [15.7 kB]\n", - "Get:301 http://archive.ubuntu.com/ubuntu bionic/universe amd64 liblab-gamut1 amd64 2.40.1-2 [178 kB]\n", - "Get:302 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libgvc6-plugins-gtk amd64 2.40.1-2 [18.2 kB]\n", - "Get:303 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libgraphviz-dev amd64 2.40.1-2 [57.3 kB]\n", - "Get:304 http://archive.ubuntu.com/ubuntu bionic/main amd64 libgtk2.0-bin amd64 2.24.32-1ubuntu1 [7536 B]\n", - "Get:305 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libgts-bin amd64 0.7.6+darcs121130-4 [41.3 kB]\n", - "Get:306 http://archive.ubuntu.com/ubuntu bionic/main amd64 libltdl-dev amd64 2.4.6-2 [162 kB]\n", - "Get:307 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 libmad0 amd64 0.15.1b-9ubuntu18.04.1 [64.6 kB]\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Get:308 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 libmad0-dev amd64 0.15.1b-9ubuntu18.04.1 [64.4 kB]\n", - "Get:309 http://archive.ubuntu.com/ubuntu bionic/main amd64 libsys-hostname-long-perl all 1.5-1 [11.7 kB]\n", - "Get:310 http://archive.ubuntu.com/ubuntu 
bionic/main amd64 libmail-sendmail-perl all 0.80-1 [22.6 kB]\n", - "Get:311 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libmikmod-config amd64 3.3.11.1-3 [5184 B]\n", - "Get:312 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libopenal-data all 1:1.18.2-2 [102 kB]\n", - "Get:313 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libsndio6.1 amd64 1.1.0-3 [23.4 kB]\n", - "Get:314 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libopenal1 amd64 1:1.18.2-2 [266 kB]\n", - "Get:315 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libwayland-cursor0 amd64 1.16.0-1ubuntu1.1~18.04.3 [10.1 kB]\n", - "Get:316 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libwayland-egl1 amd64 1.16.0-1ubuntu1.1~18.04.3 [5464 B]\n", - "Get:317 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libwayland-egl1-mesa amd64 19.2.8-0ubuntu0~18.04.3 [6892 B]\n", - "Get:318 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libxkbcommon0 amd64 0.8.2-1~ubuntu18.04.1 [97.8 kB]\n", - "Get:319 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 libsdl2-2.0-0 amd64 2.0.8+dfsg1-1ubuntu1.18.04.4 [382 kB]\n", - "Get:320 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libmikmod3 amd64 3.3.11.1-3 [113 kB]\n", - "Get:321 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libmikmod-dev amd64 3.3.11.1-3 [196 kB]\n", - "Get:322 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libmodplug1 amd64 1:0.8.9.0-1 [150 kB]\n", - "Get:323 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libmodplug-dev amd64 1:0.8.9.0-1 [14.6 kB]\n", - "Get:324 http://archive.ubuntu.com/ubuntu bionic/main amd64 libproxy1v5 amd64 0.4.15-1 [49.5 kB]\n", - "Get:325 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libproxy-tools amd64 0.4.15-1 [5312 B]\n", - "Get:326 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libpulse-mainloop-glib0 amd64 1:11.1-1ubuntu7.4 [22.1 kB]\n", - "Get:327 http://archive.ubuntu.com/ubuntu 
bionic-updates/main amd64 libpulse-dev amd64 1:11.1-1ubuntu7.4 [81.5 kB]\n", - "Get:328 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libsdl1.2debian amd64 1.2.15+dfsg2-0.1ubuntu0.1 [175 kB]\n", - "Get:329 http://archive.ubuntu.com/ubuntu bionic/main amd64 libvorbisfile3 amd64 1.3.5-4.2 [16.0 kB]\n", - "Get:330 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libsdl-mixer1.2 amd64 1.2.12-14 [72.0 kB]\n", - "Get:331 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libsdl1.2-dev amd64 1.2.15+dfsg2-0.1ubuntu0.1 [706 kB]\n", - "Get:332 http://archive.ubuntu.com/ubuntu bionic/main amd64 libvorbis-dev amd64 1.3.5-4.2 [321 kB]\n", - "Get:333 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libsdl-mixer1.2-dev amd64 1.2.12-14 [90.6 kB]\n", - "Get:334 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libserf-1-1 amd64 1.3.9-6 [44.4 kB]\n", - "Get:335 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libsvn1 amd64 1.9.7-4ubuntu1 [1183 kB]\n", - "Get:336 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 libvlccore9 amd64 3.0.8-0ubuntu18.04.1 [434 kB]\n", - "Get:337 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 libvlc5 amd64 3.0.8-0ubuntu18.04.1 [68.0 kB]\n", - "Get:338 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 libvlc-bin amd64 3.0.8-0ubuntu18.04.1 [17.1 kB]\n", - "Get:339 http://archive.ubuntu.com/ubuntu bionic-updates/universe amd64 libvlc-dev amd64 3.0.8-0ubuntu18.04.1 [60.1 kB]\n", - "Get:340 http://archive.ubuntu.com/ubuntu bionic-updates/main amd64 libx11-doc all 2:1.6.4-3ubuntu0.2 [2065 kB]\n", - "Get:341 http://archive.ubuntu.com/ubuntu bionic/main amd64 x11proto-xf86dga-dev all 2018.4-4 [2624 B]\n", - "Get:342 http://archive.ubuntu.com/ubuntu bionic/main amd64 libxxf86dga-dev amd64 2:1.1.4-1 [17.6 kB]\n", - "Get:343 http://archive.ubuntu.com/ubuntu bionic/universe amd64 subversion amd64 1.9.7-4ubuntu1 [834 kB]\n", - "Get:344 http://archive.ubuntu.com/ubuntu bionic/universe 
amd64 timgm6mb-soundfont all 1.3-2 [5423 kB]\n", - "Get:345 http://archive.ubuntu.com/ubuntu bionic/main amd64 zip amd64 3.0-11build1 [167 kB]\n", - "Get:346 http://archive.ubuntu.com/ubuntu bionic/main amd64 libc6-dbg amd64 2.27-3ubuntu1 [5161 kB]\n", - "Get:347 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libdca0 amd64 0.0.5-10 [100.0 kB]\n", - "Get:348 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libdca-dev amd64 0.0.5-10 [90.2 kB]\n", - "Get:349 http://archive.ubuntu.com/ubuntu bionic/universe amd64 libdts-dev amd64 0.0.5-10 [3068 B]\n", - "Fetched 243 MB in 16s (15.2 MB/s) \n", - "debconf: delaying package configuration, since apt-utils is not installed\n", - "Selecting previously unselected package libxau6:amd64.\n", - "(Reading database ... 17806 files and directories currently installed.)\n", - "Preparing to unpack .../00-libxau6_1%3a1.0.8-1_amd64.deb ...\n", - "Unpacking libxau6:amd64 (1:1.0.8-1) ...\n", - "Selecting previously unselected package libxdmcp6:amd64.\n", - "Preparing to unpack .../01-libxdmcp6_1%3a1.1.2-3_amd64.deb ...\n", - "Unpacking libxdmcp6:amd64 (1:1.1.2-3) ...\n", - "Selecting previously unselected package libxcb1:amd64.\n", - "Preparing to unpack .../02-libxcb1_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb1:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libx11-data.\n", - "Preparing to unpack .../03-libx11-data_2%3a1.6.4-3ubuntu0.2_all.deb ...\n", - "Unpacking libx11-data (2:1.6.4-3ubuntu0.2) ...\n", - "Selecting previously unselected package libx11-6:amd64.\n", - "Preparing to unpack .../04-libx11-6_2%3a1.6.4-3ubuntu0.2_amd64.deb ...\n", - "Unpacking libx11-6:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - "Selecting previously unselected package libxext6:amd64.\n", - "Preparing to unpack .../05-libxext6_2%3a1.3.3-1_amd64.deb ...\n", - "Unpacking libxext6:amd64 (2:1.3.3-1) ...\n", - "Selecting previously unselected package bsdmainutils.\n", - "Preparing to unpack 
.../06-bsdmainutils_11.1.2ubuntu1_amd64.deb ...\n", - "Unpacking bsdmainutils (11.1.2ubuntu1) ...\n", - "Selecting previously unselected package groff-base.\n", - "Preparing to unpack .../07-groff-base_1.22.3-10_amd64.deb ...\n", - "Unpacking groff-base (1.22.3-10) ...\n", - "Selecting previously unselected package libpipeline1:amd64.\n", - "Preparing to unpack .../08-libpipeline1_1.5.0-1_amd64.deb ...\n", - "Unpacking libpipeline1:amd64 (1.5.0-1) ...\n", - "Selecting previously unselected package man-db.\n", - "Preparing to unpack .../09-man-db_2.8.3-2ubuntu0.1_amd64.deb ...\n", - "Unpacking man-db (2.8.3-2ubuntu0.1) ...\n", - "Selecting previously unselected package fonts-dejavu-core.\n", - "Preparing to unpack .../10-fonts-dejavu-core_2.37-1_all.deb ...\n", - "Unpacking fonts-dejavu-core (2.37-1) ...\n", - "Selecting previously unselected package fontconfig-config.\n", - "Preparing to unpack .../11-fontconfig-config_2.12.6-0ubuntu2_all.deb ...\n", - "Unpacking fontconfig-config (2.12.6-0ubuntu2) ...\n", - "Selecting previously unselected package libfontconfig1:amd64.\n", - "Preparing to unpack .../12-libfontconfig1_2.12.6-0ubuntu2_amd64.deb ...\n", - "Unpacking libfontconfig1:amd64 (2.12.6-0ubuntu2) ...\n", - "Selecting previously unselected package fontconfig.\n", - "Preparing to unpack .../13-fontconfig_2.12.6-0ubuntu2_amd64.deb ...\n", - "Unpacking fontconfig (2.12.6-0ubuntu2) ...\n", - "Selecting previously unselected package liblzo2-2:amd64.\n", - "Preparing to unpack .../14-liblzo2-2_2.08-1.2_amd64.deb ...\n", - "Unpacking liblzo2-2:amd64 (2.08-1.2) ...\n", - "Selecting previously unselected package libogg0:amd64.\n", - "Preparing to unpack .../15-libogg0_1.3.2-1_amd64.deb ...\n", - "Unpacking libogg0:amd64 (1.3.2-1) ...\n", - "Selecting previously unselected package x11-common.\n", - "Preparing to unpack .../16-x11-common_1%3a7.7+19ubuntu7.1_all.deb ...\n", - "dpkg-query: no packages found matching nux-tools\n", - "Unpacking x11-common (1:7.7+19ubuntu7.1) 
...\n", - "Selecting previously unselected package libice6:amd64.\n", - "Preparing to unpack .../17-libice6_2%3a1.0.9-2_amd64.deb ...\n", - "Unpacking libice6:amd64 (2:1.0.9-2) ...\n", - "Selecting previously unselected package libsm6:amd64.\n", - "Preparing to unpack .../18-libsm6_2%3a1.2.2-1_amd64.deb ...\n", - "Unpacking libsm6:amd64 (2:1.2.2-1) ...\n", - "Selecting previously unselected package libxrender1:amd64.\n", - "Preparing to unpack .../19-libxrender1_1%3a0.9.10-1_amd64.deb ...\n", - "Unpacking libxrender1:amd64 (1:0.9.10-1) ...\n", - "Selecting previously unselected package libxft2:amd64.\n", - "Preparing to unpack .../20-libxft2_2.3.2-1_amd64.deb ...\n", - "Unpacking libxft2:amd64 (2.3.2-1) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Selecting previously unselected package libxinerama1:amd64.\n", - "Preparing to unpack .../21-libxinerama1_2%3a1.1.3-1_amd64.deb ...\n", - "Unpacking libxinerama1:amd64 (2:1.1.3-1) ...\n", - "Selecting previously unselected package libxss1:amd64.\n", - "Preparing to unpack .../22-libxss1_1%3a1.2.2-1_amd64.deb ...\n", - "Unpacking libxss1:amd64 (1:1.2.2-1) ...\n", - "Selecting previously unselected package libxxf86dga1:amd64.\n", - "Preparing to unpack .../23-libxxf86dga1_2%3a1.1.4-1_amd64.deb ...\n", - "Unpacking libxxf86dga1:amd64 (2:1.1.4-1) ...\n", - "Selecting previously unselected package libxxf86vm1:amd64.\n", - "Preparing to unpack .../24-libxxf86vm1_1%3a1.1.4-1_amd64.deb ...\n", - "Unpacking libxxf86vm1:amd64 (1:1.1.4-1) ...\n", - "Selecting previously unselected package libpython2.7-minimal:amd64.\n", - "Preparing to unpack .../25-libpython2.7-minimal_2.7.17-1~18.04_amd64.deb ...\n", - "Unpacking libpython2.7-minimal:amd64 (2.7.17-1~18.04) ...\n", - "Selecting previously unselected package python2.7-minimal.\n", - "Preparing to unpack .../26-python2.7-minimal_2.7.17-1~18.04_amd64.deb ...\n", - "Unpacking python2.7-minimal (2.7.17-1~18.04) ...\n", - "Selecting previously 
unselected package python-minimal.\n", - "Preparing to unpack .../27-python-minimal_2.7.15~rc1-1_amd64.deb ...\n", - "Unpacking python-minimal (2.7.15~rc1-1) ...\n", - "Selecting previously unselected package libpython2.7-stdlib:amd64.\n", - "Preparing to unpack .../28-libpython2.7-stdlib_2.7.17-1~18.04_amd64.deb ...\n", - "Unpacking libpython2.7-stdlib:amd64 (2.7.17-1~18.04) ...\n", - "Selecting previously unselected package python2.7.\n", - "Preparing to unpack .../29-python2.7_2.7.17-1~18.04_amd64.deb ...\n", - "Unpacking python2.7 (2.7.17-1~18.04) ...\n", - "Selecting previously unselected package libpython-stdlib:amd64.\n", - "Preparing to unpack .../30-libpython-stdlib_2.7.15~rc1-1_amd64.deb ...\n", - "Unpacking libpython-stdlib:amd64 (2.7.15~rc1-1) ...\n", - "Setting up libpython2.7-minimal:amd64 (2.7.17-1~18.04) ...\n", - "Setting up python2.7-minimal (2.7.17-1~18.04) ...\n", - "Linking and byte-compiling packages for runtime python2.7...\n", - "Setting up python-minimal (2.7.15~rc1-1) ...\n", - "Selecting previously unselected package python.\n", - "(Reading database ... 
19614 files and directories currently installed.)\n", - "Preparing to unpack .../000-python_2.7.15~rc1-1_amd64.deb ...\n", - "Unpacking python (2.7.15~rc1-1) ...\n", - "Selecting previously unselected package libmagic-mgc.\n", - "Preparing to unpack .../001-libmagic-mgc_1%3a5.32-2ubuntu0.3_amd64.deb ...\n", - "Unpacking libmagic-mgc (1:5.32-2ubuntu0.3) ...\n", - "Selecting previously unselected package libmagic1:amd64.\n", - "Preparing to unpack .../002-libmagic1_1%3a5.32-2ubuntu0.3_amd64.deb ...\n", - "Unpacking libmagic1:amd64 (1:5.32-2ubuntu0.3) ...\n", - "Selecting previously unselected package file.\n", - "Preparing to unpack .../003-file_1%3a5.32-2ubuntu0.3_amd64.deb ...\n", - "Unpacking file (1:5.32-2ubuntu0.3) ...\n", - "Selecting previously unselected package libelf1:amd64.\n", - "Preparing to unpack .../004-libelf1_0.170-0.4ubuntu0.1_amd64.deb ...\n", - "Unpacking libelf1:amd64 (0.170-0.4ubuntu0.1) ...\n", - "Preparing to unpack .../005-libglib2.0-0_2.56.4-0ubuntu0.18.04.6_amd64.deb ...\n", - "Unpacking libglib2.0-0:amd64 (2.56.4-0ubuntu0.18.04.6) over (2.56.4-0ubuntu0.18.04.4) ...\n", - "Selecting previously unselected package libglib2.0-data.\n", - "Preparing to unpack .../006-libglib2.0-data_2.56.4-0ubuntu0.18.04.6_all.deb ...\n", - "Unpacking libglib2.0-data (2.56.4-0ubuntu0.18.04.6) ...\n", - "Selecting previously unselected package libicu60:amd64.\n", - "Preparing to unpack .../007-libicu60_60.2-3ubuntu3.1_amd64.deb ...\n", - "Unpacking libicu60:amd64 (60.2-3ubuntu3.1) ...\n", - "Selecting previously unselected package libidn11:amd64.\n", - "Preparing to unpack .../008-libidn11_1.33-2.1ubuntu1.2_amd64.deb ...\n", - "Unpacking libidn11:amd64 (1.33-2.1ubuntu1.2) ...\n", - "Selecting previously unselected package libslang2:amd64.\n", - "Preparing to unpack .../009-libslang2_2.3.1a-3ubuntu1_amd64.deb ...\n", - "Unpacking libslang2:amd64 (2.3.1a-3ubuntu1) ...\n", - "Selecting previously unselected package libxml2:amd64.\n", - "Preparing to unpack 
.../010-libxml2_2.9.4+dfsg1-6.1ubuntu1.3_amd64.deb ...\n", - "Unpacking libxml2:amd64 (2.9.4+dfsg1-6.1ubuntu1.3) ...\n", - "Selecting previously unselected package shared-mime-info.\n", - "Preparing to unpack .../011-shared-mime-info_1.9-2_amd64.deb ...\n", - "Unpacking shared-mime-info (1.9-2) ...\n", - "Selecting previously unselected package xkb-data.\n", - "Preparing to unpack .../012-xkb-data_2.23.1-1ubuntu1.18.04.1_all.deb ...\n", - "Unpacking xkb-data (2.23.1-1ubuntu1.18.04.1) ...\n", - "Selecting previously unselected package gettext-base.\n", - "Preparing to unpack .../013-gettext-base_0.19.8.1-6ubuntu0.3_amd64.deb ...\n", - "Unpacking gettext-base (0.19.8.1-6ubuntu0.3) ...\n", - "Selecting previously unselected package libdrm-common.\n", - "Preparing to unpack .../014-libdrm-common_2.4.99-1ubuntu1~18.04.2_all.deb ...\n", - "Unpacking libdrm-common (2.4.99-1ubuntu1~18.04.2) ...\n", - "Selecting previously unselected package libdrm2:amd64.\n", - "Preparing to unpack .../015-libdrm2_2.4.99-1ubuntu1~18.04.2_amd64.deb ...\n", - "Unpacking libdrm2:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Selecting previously unselected package hicolor-icon-theme.\n", - "Preparing to unpack .../016-hicolor-icon-theme_0.17-2_all.deb ...\n", - "Unpacking hicolor-icon-theme (0.17-2) ...\n", - "Selecting previously unselected package libjbig0:amd64.\n", - "Preparing to unpack .../017-libjbig0_2.1-3.1build1_amd64.deb ...\n", - "Unpacking libjbig0:amd64 (2.1-3.1build1) ...\n", - "Selecting previously unselected package libtiff5:amd64.\n", - "Preparing to unpack .../018-libtiff5_4.0.9-5ubuntu0.3_amd64.deb ...\n", - "Unpacking libtiff5:amd64 (4.0.9-5ubuntu0.3) ...\n", - "Selecting previously unselected package libgdk-pixbuf2.0-common.\n", - "Preparing to unpack .../019-libgdk-pixbuf2.0-common_2.36.11-2_all.deb ...\n", - "Unpacking libgdk-pixbuf2.0-common (2.36.11-2) ...\n", - "Selecting previously unselected package libgdk-pixbuf2.0-0:amd64.\n", - "Preparing to unpack 
.../020-libgdk-pixbuf2.0-0_2.36.11-2_amd64.deb ...\n", - "Unpacking libgdk-pixbuf2.0-0:amd64 (2.36.11-2) ...\n", - "Selecting previously unselected package gtk-update-icon-cache.\n", - "Preparing to unpack .../021-gtk-update-icon-cache_3.22.30-1ubuntu4_amd64.deb ...\n", - "No diversion 'diversion of /usr/sbin/update-icon-caches to /usr/sbin/update-icon-caches.gtk2 by libgtk-3-bin', none removed.\n", - "No diversion 'diversion of /usr/share/man/man8/update-icon-caches.8.gz to /usr/share/man/man8/update-icon-caches.gtk2.8.gz by libgtk-3-bin', none removed.\n", - "Unpacking gtk-update-icon-cache (3.22.30-1ubuntu4) ...\n", - "Selecting previously unselected package libpixman-1-0:amd64.\n", - "Preparing to unpack .../022-libpixman-1-0_0.34.0-2_amd64.deb ...\n", - "Unpacking libpixman-1-0:amd64 (0.34.0-2) ...\n", - "Selecting previously unselected package libxcb-render0:amd64.\n", - "Preparing to unpack .../023-libxcb-render0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-render0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-shm0:amd64.\n", - "Preparing to unpack .../024-libxcb-shm0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-shm0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libcairo2:amd64.\n", - "Preparing to unpack .../025-libcairo2_1.15.10-2ubuntu0.1_amd64.deb ...\n", - "Unpacking libcairo2:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Selecting previously unselected package libcroco3:amd64.\n", - "Preparing to unpack .../026-libcroco3_0.6.12-2_amd64.deb ...\n", - "Unpacking libcroco3:amd64 (0.6.12-2) ...\n", - "Selecting previously unselected package libthai-data.\n", - "Preparing to unpack .../027-libthai-data_0.1.27-2_all.deb ...\n", - "Unpacking libthai-data (0.1.27-2) ...\n", - "Selecting previously unselected package libdatrie1:amd64.\n", - "Preparing to unpack .../028-libdatrie1_0.2.10-7_amd64.deb ...\n", - "Unpacking libdatrie1:amd64 (0.2.10-7) ...\n", - "Selecting 
previously unselected package libthai0:amd64.\n", - "Preparing to unpack .../029-libthai0_0.1.27-2_amd64.deb ...\n", - "Unpacking libthai0:amd64 (0.1.27-2) ...\n", - "Selecting previously unselected package libpango-1.0-0:amd64.\n", - "Preparing to unpack .../030-libpango-1.0-0_1.40.14-1ubuntu0.1_amd64.deb ...\n", - "Unpacking libpango-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Selecting previously unselected package libgraphite2-3:amd64.\n", - "Preparing to unpack .../031-libgraphite2-3_1.3.11-2_amd64.deb ...\n", - "Unpacking libgraphite2-3:amd64 (1.3.11-2) ...\n", - "Selecting previously unselected package libharfbuzz0b:amd64.\n", - "Preparing to unpack .../032-libharfbuzz0b_1.7.2-1ubuntu1_amd64.deb ...\n", - "Unpacking libharfbuzz0b:amd64 (1.7.2-1ubuntu1) ...\n", - "Selecting previously unselected package libpangoft2-1.0-0:amd64.\n", - "Preparing to unpack .../033-libpangoft2-1.0-0_1.40.14-1ubuntu0.1_amd64.deb ...\n", - "Unpacking libpangoft2-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Selecting previously unselected package libpangocairo-1.0-0:amd64.\n", - "Preparing to unpack .../034-libpangocairo-1.0-0_1.40.14-1ubuntu0.1_amd64.deb ...\n", - "Unpacking libpangocairo-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Selecting previously unselected package librsvg2-2:amd64.\n", - "Preparing to unpack .../035-librsvg2-2_2.40.20-2_amd64.deb ...\n", - "Unpacking librsvg2-2:amd64 (2.40.20-2) ...\n", - "Selecting previously unselected package librsvg2-common:amd64.\n", - "Preparing to unpack .../036-librsvg2-common_2.40.20-2_amd64.deb ...\n", - "Unpacking librsvg2-common:amd64 (2.40.20-2) ...\n", - "Selecting previously unselected package humanity-icon-theme.\n", - "Preparing to unpack .../037-humanity-icon-theme_0.6.15_all.deb ...\n", - "Unpacking humanity-icon-theme (0.6.15) ...\n", - "Selecting previously unselected package ubuntu-mono.\n", - "Preparing to unpack 
.../038-ubuntu-mono_16.10+18.04.20181005-0ubuntu1_all.deb ...\n", - "Unpacking ubuntu-mono (16.10+18.04.20181005-0ubuntu1) ...\n", - "Selecting previously unselected package adwaita-icon-theme.\n", - "Preparing to unpack .../039-adwaita-icon-theme_3.28.0-1ubuntu1_all.deb ...\n", - "Unpacking adwaita-icon-theme (3.28.0-1ubuntu1) ...\n", - "Selecting previously unselected package libsigsegv2:amd64.\n", - "Preparing to unpack .../040-libsigsegv2_2.12-1_amd64.deb ...\n", - "Unpacking libsigsegv2:amd64 (2.12-1) ...\n", - "Selecting previously unselected package m4.\n", - "Preparing to unpack .../041-m4_1.4.18-1_amd64.deb ...\n", - "Unpacking m4 (1.4.18-1) ...\n", - "Selecting previously unselected package autoconf.\n", - "Preparing to unpack .../042-autoconf_2.69-11_all.deb ...\n", - "Unpacking autoconf (2.69-11) ...\n", - "Selecting previously unselected package autotools-dev.\n", - "Preparing to unpack .../043-autotools-dev_20180224.1_all.deb ...\n", - "Unpacking autotools-dev (20180224.1) ...\n", - "Selecting previously unselected package automake.\n", - "Preparing to unpack .../044-automake_1%3a1.15.1-3ubuntu2_all.deb ...\n", - "Unpacking automake (1:1.15.1-3ubuntu2) ...\n", - "Selecting previously unselected package autopoint.\n", - "Preparing to unpack .../045-autopoint_0.19.8.1-6ubuntu0.3_all.deb ...\n", - "Unpacking autopoint (0.19.8.1-6ubuntu0.3) ...\n", - "Selecting previously unselected package libtool.\n", - "Preparing to unpack .../046-libtool_2.4.6-2_all.deb ...\n", - "Unpacking libtool (2.4.6-2) ...\n", - "Selecting previously unselected package dh-autoreconf.\n", - "Preparing to unpack .../047-dh-autoreconf_17_all.deb ...\n", - "Unpacking dh-autoreconf (17) ...\n", - "Selecting previously unselected package libarchive-zip-perl.\n", - "Preparing to unpack .../048-libarchive-zip-perl_1.60-1ubuntu0.1_all.deb ...\n", - "Unpacking libarchive-zip-perl (1.60-1ubuntu0.1) ...\n", - "Selecting previously unselected package libfile-stripnondeterminism-perl.\n", - 
"Preparing to unpack .../049-libfile-stripnondeterminism-perl_0.040-1.1~build1_all.deb ...\n", - "Unpacking libfile-stripnondeterminism-perl (0.040-1.1~build1) ...\n", - "Selecting previously unselected package libtimedate-perl.\n", - "Preparing to unpack .../050-libtimedate-perl_2.3000-2_all.deb ...\n", - "Unpacking libtimedate-perl (2.3000-2) ...\n", - "Selecting previously unselected package dh-strip-nondeterminism.\n", - "Preparing to unpack .../051-dh-strip-nondeterminism_0.040-1.1~build1_all.deb ...\n", - "Unpacking dh-strip-nondeterminism (0.040-1.1~build1) ...\n", - "Selecting previously unselected package gettext.\n", - "Preparing to unpack .../052-gettext_0.19.8.1-6ubuntu0.3_amd64.deb ...\n", - "Unpacking gettext (0.19.8.1-6ubuntu0.3) ...\n", - "Selecting previously unselected package intltool-debian.\n", - "Preparing to unpack .../053-intltool-debian_0.35.0+20060710.4_all.deb ...\n", - "Unpacking intltool-debian (0.35.0+20060710.4) ...\n", - "Selecting previously unselected package po-debconf.\n", - "Preparing to unpack .../054-po-debconf_1.0.20_all.deb ...\n", - "Unpacking po-debconf (1.0.20) ...\n", - "Selecting previously unselected package debhelper.\n", - "Preparing to unpack .../055-debhelper_11.1.6ubuntu2_all.deb ...\n", - "Unpacking debhelper (11.1.6ubuntu2) ...\n", - "Selecting previously unselected package fp-units-rtl-3.0.4:amd64.\n", - "Preparing to unpack .../056-fp-units-rtl-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-rtl-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-compiler-3.0.4:amd64.\n", - "Preparing to unpack .../057-fp-compiler-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-compiler-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-docs-3.0.4.\n", - "Preparing to unpack .../058-fp-docs-3.0.4_3.0.4+dfsg-18ubuntu2_all.deb ...\n", - "Unpacking fp-docs-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously 
unselected package fp-ide-3.0.4.\n", - "Preparing to unpack .../059-fp-ide-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-ide-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-base-3.0.4:amd64.\n", - "Preparing to unpack .../060-fp-units-base-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-base-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-db-3.0.4:amd64.\n", - "Preparing to unpack .../061-fp-units-db-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-db-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-fcl-3.0.4:amd64.\n", - "Preparing to unpack .../062-fp-units-fcl-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-fcl-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-fv-3.0.4:amd64.\n", - "Preparing to unpack .../063-fp-units-fv-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-fv-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-gfx-3.0.4:amd64.\n", - "Preparing to unpack .../064-fp-units-gfx-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-gfx-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package libgtk2.0-common.\n", - "Preparing to unpack .../065-libgtk2.0-common_2.24.32-1ubuntu1_all.deb ...\n", - "Unpacking libgtk2.0-common (2.24.32-1ubuntu1) ...\n", - "Selecting previously unselected package libatk1.0-data.\n", - "Preparing to unpack .../066-libatk1.0-data_2.28.1-1_all.deb ...\n", - "Unpacking libatk1.0-data (2.28.1-1) ...\n", - "Selecting previously unselected package libatk1.0-0:amd64.\n", - "Preparing to unpack .../067-libatk1.0-0_2.28.1-1_amd64.deb ...\n", - "Unpacking libatk1.0-0:amd64 (2.28.1-1) ...\n", - "Selecting previously unselected package libavahi-common-data:amd64.\n", - "Preparing to unpack 
.../068-libavahi-common-data_0.7-3.1ubuntu1.2_amd64.deb ...\n", - "Unpacking libavahi-common-data:amd64 (0.7-3.1ubuntu1.2) ...\n", - "Selecting previously unselected package libavahi-common3:amd64.\n", - "Preparing to unpack .../069-libavahi-common3_0.7-3.1ubuntu1.2_amd64.deb ...\n", - "Unpacking libavahi-common3:amd64 (0.7-3.1ubuntu1.2) ...\n", - "Selecting previously unselected package libavahi-client3:amd64.\n", - "Preparing to unpack .../070-libavahi-client3_0.7-3.1ubuntu1.2_amd64.deb ...\n", - "Unpacking libavahi-client3:amd64 (0.7-3.1ubuntu1.2) ...\n", - "Selecting previously unselected package libcups2:amd64.\n", - "Preparing to unpack .../071-libcups2_2.2.7-1ubuntu2.7_amd64.deb ...\n", - "Unpacking libcups2:amd64 (2.2.7-1ubuntu2.7) ...\n", - "Selecting previously unselected package libxcomposite1:amd64.\n", - "Preparing to unpack .../072-libxcomposite1_1%3a0.4.4-2_amd64.deb ...\n", - "Unpacking libxcomposite1:amd64 (1:0.4.4-2) ...\n", - "Selecting previously unselected package libxfixes3:amd64.\n", - "Preparing to unpack .../073-libxfixes3_1%3a5.0.3-1_amd64.deb ...\n", - "Unpacking libxfixes3:amd64 (1:5.0.3-1) ...\n", - "Selecting previously unselected package libxcursor1:amd64.\n", - "Preparing to unpack .../074-libxcursor1_1%3a1.1.15-1_amd64.deb ...\n", - "Unpacking libxcursor1:amd64 (1:1.1.15-1) ...\n", - "Selecting previously unselected package libxdamage1:amd64.\n", - "Preparing to unpack .../075-libxdamage1_1%3a1.1.4-3_amd64.deb ...\n", - "Unpacking libxdamage1:amd64 (1:1.1.4-3) ...\n", - "Selecting previously unselected package libxi6:amd64.\n", - "Preparing to unpack .../076-libxi6_2%3a1.7.9-1_amd64.deb ...\n", - "Unpacking libxi6:amd64 (2:1.7.9-1) ...\n", - "Selecting previously unselected package libxrandr2:amd64.\n", - "Preparing to unpack .../077-libxrandr2_2%3a1.5.1-1_amd64.deb ...\n", - "Unpacking libxrandr2:amd64 (2:1.5.1-1) ...\n", - "Selecting previously unselected package libgtk2.0-0:amd64.\n", - "Preparing to unpack 
.../078-libgtk2.0-0_2.24.32-1ubuntu1_amd64.deb ...\n", - "Unpacking libgtk2.0-0:amd64 (2.24.32-1ubuntu1) ...\n", - "Selecting previously unselected package gir1.2-atk-1.0:amd64.\n", - "Preparing to unpack .../079-gir1.2-atk-1.0_2.28.1-1_amd64.deb ...\n", - "Unpacking gir1.2-atk-1.0:amd64 (2.28.1-1) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Selecting previously unselected package gir1.2-freedesktop:amd64.\n", - "Preparing to unpack .../080-gir1.2-freedesktop_1.56.1-1_amd64.deb ...\n", - "Unpacking gir1.2-freedesktop:amd64 (1.56.1-1) ...\n", - "Selecting previously unselected package gir1.2-gdkpixbuf-2.0:amd64.\n", - "Preparing to unpack .../081-gir1.2-gdkpixbuf-2.0_2.36.11-2_amd64.deb ...\n", - "Unpacking gir1.2-gdkpixbuf-2.0:amd64 (2.36.11-2) ...\n", - "Selecting previously unselected package libpangoxft-1.0-0:amd64.\n", - "Preparing to unpack .../082-libpangoxft-1.0-0_1.40.14-1ubuntu0.1_amd64.deb ...\n", - "Unpacking libpangoxft-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Selecting previously unselected package gir1.2-pango-1.0:amd64.\n", - "Preparing to unpack .../083-gir1.2-pango-1.0_1.40.14-1ubuntu0.1_amd64.deb ...\n", - "Unpacking gir1.2-pango-1.0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Selecting previously unselected package gir1.2-gtk-2.0.\n", - "Preparing to unpack .../084-gir1.2-gtk-2.0_2.24.32-1ubuntu1_amd64.deb ...\n", - "Unpacking gir1.2-gtk-2.0 (2.24.32-1ubuntu1) ...\n", - "Selecting previously unselected package libglib2.0-bin.\n", - "Preparing to unpack .../085-libglib2.0-bin_2.56.4-0ubuntu0.18.04.6_amd64.deb ...\n", - "Unpacking libglib2.0-bin (2.56.4-0ubuntu0.18.04.6) ...\n", - "Selecting previously unselected package libglib2.0-dev-bin.\n", - "Preparing to unpack .../086-libglib2.0-dev-bin_2.56.4-0ubuntu0.18.04.6_amd64.deb ...\n", - "Unpacking libglib2.0-dev-bin (2.56.4-0ubuntu0.18.04.6) ...\n", - "Selecting previously unselected package libpcre16-3:amd64.\n", - "Preparing to unpack 
.../087-libpcre16-3_2%3a8.39-9_amd64.deb ...\n", - "Unpacking libpcre16-3:amd64 (2:8.39-9) ...\n", - "Selecting previously unselected package libpcre32-3:amd64.\n", - "Preparing to unpack .../088-libpcre32-3_2%3a8.39-9_amd64.deb ...\n", - "Unpacking libpcre32-3:amd64 (2:8.39-9) ...\n", - "Selecting previously unselected package libpcrecpp0v5:amd64.\n", - "Preparing to unpack .../089-libpcrecpp0v5_2%3a8.39-9_amd64.deb ...\n", - "Unpacking libpcrecpp0v5:amd64 (2:8.39-9) ...\n", - "Selecting previously unselected package libpcre3-dev:amd64.\n", - "Preparing to unpack .../090-libpcre3-dev_2%3a8.39-9_amd64.deb ...\n", - "Unpacking libpcre3-dev:amd64 (2:8.39-9) ...\n", - "Selecting previously unselected package libglib2.0-dev:amd64.\n", - "Preparing to unpack .../091-libglib2.0-dev_2.56.4-0ubuntu0.18.04.6_amd64.deb ...\n", - "Unpacking libglib2.0-dev:amd64 (2.56.4-0ubuntu0.18.04.6) ...\n", - "Selecting previously unselected package xorg-sgml-doctools.\n", - "Preparing to unpack .../092-xorg-sgml-doctools_1%3a1.11-1_all.deb ...\n", - "Unpacking xorg-sgml-doctools (1:1.11-1) ...\n", - "Selecting previously unselected package x11proto-dev.\n", - "Preparing to unpack .../093-x11proto-dev_2018.4-4_all.deb ...\n", - "Unpacking x11proto-dev (2018.4-4) ...\n", - "Selecting previously unselected package x11proto-core-dev.\n", - "Preparing to unpack .../094-x11proto-core-dev_2018.4-4_all.deb ...\n", - "Unpacking x11proto-core-dev (2018.4-4) ...\n", - "Selecting previously unselected package libxau-dev:amd64.\n", - "Preparing to unpack .../095-libxau-dev_1%3a1.0.8-1_amd64.deb ...\n", - "Unpacking libxau-dev:amd64 (1:1.0.8-1) ...\n", - "Selecting previously unselected package libxdmcp-dev:amd64.\n", - "Preparing to unpack .../096-libxdmcp-dev_1%3a1.1.2-3_amd64.deb ...\n", - "Unpacking libxdmcp-dev:amd64 (1:1.1.2-3) ...\n", - "Selecting previously unselected package x11proto-input-dev.\n", - "Preparing to unpack .../097-x11proto-input-dev_2018.4-4_all.deb ...\n", - "Unpacking 
x11proto-input-dev (2018.4-4) ...\n", - "Selecting previously unselected package xtrans-dev.\n", - "Preparing to unpack .../098-xtrans-dev_1.3.5-1_all.deb ...\n", - "Unpacking xtrans-dev (1.3.5-1) ...\n", - "Selecting previously unselected package libpthread-stubs0-dev:amd64.\n", - "Preparing to unpack .../099-libpthread-stubs0-dev_0.3-4_amd64.deb ...\n", - "Unpacking libpthread-stubs0-dev:amd64 (0.3-4) ...\n", - "Selecting previously unselected package libxcb1-dev:amd64.\n", - "Preparing to unpack .../100-libxcb1-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb1-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libx11-dev:amd64.\n", - "Preparing to unpack .../101-libx11-dev_2%3a1.6.4-3ubuntu0.2_amd64.deb ...\n", - "Unpacking libx11-dev:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - "Selecting previously unselected package libgdk-pixbuf2.0-dev.\n", - "Preparing to unpack .../102-libgdk-pixbuf2.0-dev_2.36.11-2_amd64.deb ...\n", - "Unpacking libgdk-pixbuf2.0-dev (2.36.11-2) ...\n", - "Selecting previously unselected package libcairo-gobject2:amd64.\n", - "Preparing to unpack .../103-libcairo-gobject2_1.15.10-2ubuntu0.1_amd64.deb ...\n", - "Unpacking libcairo-gobject2:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Selecting previously unselected package libcairo-script-interpreter2:amd64.\n", - "Preparing to unpack .../104-libcairo-script-interpreter2_1.15.10-2ubuntu0.1_amd64.deb ...\n", - "Unpacking libcairo-script-interpreter2:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Selecting previously unselected package libfontconfig1-dev:amd64.\n", - "Preparing to unpack .../105-libfontconfig1-dev_2.12.6-0ubuntu2_amd64.deb ...\n", - "Unpacking libfontconfig1-dev:amd64 (2.12.6-0ubuntu2) ...\n", - "Selecting previously unselected package libxrender-dev:amd64.\n", - "Preparing to unpack .../106-libxrender-dev_1%3a0.9.10-1_amd64.deb ...\n", - "Unpacking libxrender-dev:amd64 (1:0.9.10-1) ...\n", - "Selecting previously unselected package x11proto-xext-dev.\n", - 
"Preparing to unpack .../107-x11proto-xext-dev_2018.4-4_all.deb ...\n", - "Unpacking x11proto-xext-dev (2018.4-4) ...\n", - "Selecting previously unselected package libxext-dev:amd64.\n", - "Preparing to unpack .../108-libxext-dev_2%3a1.3.3-1_amd64.deb ...\n", - "Unpacking libxext-dev:amd64 (2:1.3.3-1) ...\n", - "Selecting previously unselected package libice-dev:amd64.\n", - "Preparing to unpack .../109-libice-dev_2%3a1.0.9-2_amd64.deb ...\n", - "Unpacking libice-dev:amd64 (2:1.0.9-2) ...\n", - "Selecting previously unselected package libsm-dev:amd64.\n", - "Preparing to unpack .../110-libsm-dev_2%3a1.2.2-1_amd64.deb ...\n", - "Unpacking libsm-dev:amd64 (2:1.2.2-1) ...\n", - "Selecting previously unselected package libpixman-1-dev:amd64.\n", - "Preparing to unpack .../111-libpixman-1-dev_0.34.0-2_amd64.deb ...\n", - "Unpacking libpixman-1-dev:amd64 (0.34.0-2) ...\n", - "Selecting previously unselected package libxcb-render0-dev:amd64.\n", - "Preparing to unpack .../112-libxcb-render0-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-render0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-shm0-dev:amd64.\n", - "Preparing to unpack .../113-libxcb-shm0-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-shm0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libcairo2-dev:amd64.\n", - "Preparing to unpack .../114-libcairo2-dev_1.15.10-2ubuntu0.1_amd64.deb ...\n", - "Unpacking libcairo2-dev:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Selecting previously unselected package libharfbuzz-icu0:amd64.\n", - "Preparing to unpack .../115-libharfbuzz-icu0_1.7.2-1ubuntu1_amd64.deb ...\n", - "Unpacking libharfbuzz-icu0:amd64 (1.7.2-1ubuntu1) ...\n", - "Selecting previously unselected package libharfbuzz-gobject0:amd64.\n", - "Preparing to unpack .../116-libharfbuzz-gobject0_1.7.2-1ubuntu1_amd64.deb ...\n", - "Unpacking libharfbuzz-gobject0:amd64 (1.7.2-1ubuntu1) ...\n", - "Selecting previously 
unselected package gir1.2-harfbuzz-0.0:amd64.\n", - "Preparing to unpack .../117-gir1.2-harfbuzz-0.0_1.7.2-1ubuntu1_amd64.deb ...\n", - "Unpacking gir1.2-harfbuzz-0.0:amd64 (1.7.2-1ubuntu1) ...\n", - "Selecting previously unselected package libgraphite2-dev:amd64.\n", - "Preparing to unpack .../118-libgraphite2-dev_1.3.11-2_amd64.deb ...\n", - "Unpacking libgraphite2-dev:amd64 (1.3.11-2) ...\n", - "Selecting previously unselected package libicu-le-hb0:amd64.\n", - "Preparing to unpack .../119-libicu-le-hb0_1.0.3+git161113-4_amd64.deb ...\n", - "Unpacking libicu-le-hb0:amd64 (1.0.3+git161113-4) ...\n", - "Selecting previously unselected package libiculx60:amd64.\n", - "Preparing to unpack .../120-libiculx60_60.2-3ubuntu3.1_amd64.deb ...\n", - "Unpacking libiculx60:amd64 (60.2-3ubuntu3.1) ...\n", - "Selecting previously unselected package icu-devtools.\n", - "Preparing to unpack .../121-icu-devtools_60.2-3ubuntu3.1_amd64.deb ...\n", - "Unpacking icu-devtools (60.2-3ubuntu3.1) ...\n", - "Selecting previously unselected package libicu-le-hb-dev:amd64.\n", - "Preparing to unpack .../122-libicu-le-hb-dev_1.0.3+git161113-4_amd64.deb ...\n", - "Unpacking libicu-le-hb-dev:amd64 (1.0.3+git161113-4) ...\n", - "Selecting previously unselected package libicu-dev.\n", - "Preparing to unpack .../123-libicu-dev_60.2-3ubuntu3.1_amd64.deb ...\n", - "Unpacking libicu-dev (60.2-3ubuntu3.1) ...\n", - "Selecting previously unselected package libharfbuzz-dev:amd64.\n", - "Preparing to unpack .../124-libharfbuzz-dev_1.7.2-1ubuntu1_amd64.deb ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Unpacking libharfbuzz-dev:amd64 (1.7.2-1ubuntu1) ...\n", - "Selecting previously unselected package libxft-dev.\n", - "Preparing to unpack .../125-libxft-dev_2.3.2-1_amd64.deb ...\n", - "Unpacking libxft-dev (2.3.2-1) ...\n", - "Selecting previously unselected package libpango1.0-dev.\n", - "Preparing to unpack .../126-libpango1.0-dev_1.40.14-1ubuntu0.1_amd64.deb ...\n", 
- "Unpacking libpango1.0-dev (1.40.14-1ubuntu0.1) ...\n", - "Selecting previously unselected package libatk1.0-dev:amd64.\n", - "Preparing to unpack .../127-libatk1.0-dev_2.28.1-1_amd64.deb ...\n", - "Unpacking libatk1.0-dev:amd64 (2.28.1-1) ...\n", - "Selecting previously unselected package x11proto-xinerama-dev.\n", - "Preparing to unpack .../128-x11proto-xinerama-dev_2018.4-4_all.deb ...\n", - "Unpacking x11proto-xinerama-dev (2018.4-4) ...\n", - "Selecting previously unselected package libxinerama-dev:amd64.\n", - "Preparing to unpack .../129-libxinerama-dev_2%3a1.1.3-1_amd64.deb ...\n", - "Unpacking libxinerama-dev:amd64 (2:1.1.3-1) ...\n", - "Selecting previously unselected package x11proto-fixes-dev.\n", - "Preparing to unpack .../130-x11proto-fixes-dev_1%3a2018.4-4_all.deb ...\n", - "Unpacking x11proto-fixes-dev (1:2018.4-4) ...\n", - "Selecting previously unselected package libxfixes-dev:amd64.\n", - "Preparing to unpack .../131-libxfixes-dev_1%3a5.0.3-1_amd64.deb ...\n", - "Unpacking libxfixes-dev:amd64 (1:5.0.3-1) ...\n", - "Selecting previously unselected package libxi-dev:amd64.\n", - "Preparing to unpack .../132-libxi-dev_2%3a1.7.9-1_amd64.deb ...\n", - "Unpacking libxi-dev:amd64 (2:1.7.9-1) ...\n", - "Selecting previously unselected package x11proto-randr-dev.\n", - "Preparing to unpack .../133-x11proto-randr-dev_2018.4-4_all.deb ...\n", - "Unpacking x11proto-randr-dev (2018.4-4) ...\n", - "Selecting previously unselected package libxrandr-dev:amd64.\n", - "Preparing to unpack .../134-libxrandr-dev_2%3a1.5.1-1_amd64.deb ...\n", - "Unpacking libxrandr-dev:amd64 (2:1.5.1-1) ...\n", - "Selecting previously unselected package libxcursor-dev:amd64.\n", - "Preparing to unpack .../135-libxcursor-dev_1%3a1.1.15-1_amd64.deb ...\n", - "Unpacking libxcursor-dev:amd64 (1:1.1.15-1) ...\n", - "Selecting previously unselected package x11proto-composite-dev.\n", - "Preparing to unpack .../136-x11proto-composite-dev_1%3a2018.4-4_all.deb ...\n", - "Unpacking 
x11proto-composite-dev (1:2018.4-4) ...\n", - "Selecting previously unselected package libxcomposite-dev:amd64.\n", - "Preparing to unpack .../137-libxcomposite-dev_1%3a0.4.4-2_amd64.deb ...\n", - "Unpacking libxcomposite-dev:amd64 (1:0.4.4-2) ...\n", - "Selecting previously unselected package x11proto-damage-dev.\n", - "Preparing to unpack .../138-x11proto-damage-dev_1%3a2018.4-4_all.deb ...\n", - "Unpacking x11proto-damage-dev (1:2018.4-4) ...\n", - "Selecting previously unselected package libxdamage-dev:amd64.\n", - "Preparing to unpack .../139-libxdamage-dev_1%3a1.1.4-3_amd64.deb ...\n", - "Unpacking libxdamage-dev:amd64 (1:1.1.4-3) ...\n", - "Selecting previously unselected package libxml2-utils.\n", - "Preparing to unpack .../140-libxml2-utils_2.9.4+dfsg1-6.1ubuntu1.3_amd64.deb ...\n", - "Unpacking libxml2-utils (2.9.4+dfsg1-6.1ubuntu1.3) ...\n", - "Selecting previously unselected package libgtk2.0-dev.\n", - "Preparing to unpack .../141-libgtk2.0-dev_2.24.32-1ubuntu1_amd64.deb ...\n", - "Unpacking libgtk2.0-dev (2.24.32-1ubuntu1) ...\n", - "Selecting previously unselected package fp-units-gtk2-3.0.4:amd64.\n", - "Preparing to unpack .../142-fp-units-gtk2-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-gtk2-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-math-3.0.4:amd64.\n", - "Preparing to unpack .../143-fp-units-math-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-math-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-misc-3.0.4:amd64.\n", - "Preparing to unpack .../144-fp-units-misc-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-misc-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-multimedia-3.0.4:amd64.\n", - "Preparing to unpack .../145-fp-units-multimedia-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-multimedia-3.0.4:amd64 
(3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-units-net-3.0.4:amd64.\n", - "Preparing to unpack .../146-fp-units-net-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-units-net-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fpc-source-3.0.4.\n", - "Preparing to unpack .../147-fpc-source-3.0.4_3.0.4+dfsg-18ubuntu2_all.deb ...\n", - "Unpacking fpc-source-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fp-utils-3.0.4.\n", - "Preparing to unpack .../148-fp-utils-3.0.4_3.0.4+dfsg-18ubuntu2_amd64.deb ...\n", - "Unpacking fp-utils-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fpc-3.0.4.\n", - "Preparing to unpack .../149-fpc-3.0.4_3.0.4+dfsg-18ubuntu2_all.deb ...\n", - "Unpacking fpc-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fpc.\n", - "Preparing to unpack .../150-fpc_3.0.4+dfsg-18ubuntu2_all.deb ...\n", - "Unpacking fpc (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package fpc-source.\n", - "Preparing to unpack .../151-fpc-source_3.0.4+dfsg-18ubuntu2_all.deb ...\n", - "Unpacking fpc-source (3.0.4+dfsg-18ubuntu2) ...\n", - "Selecting previously unselected package libdw1:amd64.\n", - "Preparing to unpack .../152-libdw1_0.170-0.4ubuntu0.1_amd64.deb ...\n", - "Unpacking libdw1:amd64 (0.170-0.4ubuntu0.1) ...\n", - "Selecting previously unselected package libbabeltrace1:amd64.\n", - "Preparing to unpack .../153-libbabeltrace1_1.5.5-1_amd64.deb ...\n", - "Unpacking libbabeltrace1:amd64 (1.5.5-1) ...\n", - "Selecting previously unselected package gdb.\n", - "Preparing to unpack .../154-gdb_8.1-0ubuntu3.2_amd64.deb ...\n", - "Unpacking gdb (8.1-0ubuntu3.2) ...\n", - "Selecting previously unselected package gdbserver.\n", - "Preparing to unpack .../155-gdbserver_8.1-0ubuntu3.2_amd64.deb ...\n", - "Unpacking gdbserver (8.1-0ubuntu3.2) ...\n", - "Selecting previously unselected 
package lazarus-ide-1.8.\n", - "Preparing to unpack .../156-lazarus-ide-1.8_1.8.2+dfsg-3_amd64.deb ...\n", - "Unpacking lazarus-ide-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lazarus-ide-gtk2-1.8.\n", - "Preparing to unpack .../157-lazarus-ide-gtk2-1.8_1.8.2+dfsg-3_amd64.deb ...\n", - "Unpacking lazarus-ide-gtk2-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lazarus-ide.\n", - "Preparing to unpack .../158-lazarus-ide_1.8.2+dfsg-3_all.deb ...\n", - "Unpacking lazarus-ide (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lazarus-src-1.8.\n", - "Preparing to unpack .../159-lazarus-src-1.8_1.8.2+dfsg-3_all.deb ...\n", - "Unpacking lazarus-src-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lcl-nogui-1.8.\n", - "Preparing to unpack .../160-lcl-nogui-1.8_1.8.2+dfsg-3_amd64.deb ...\n", - "Unpacking lcl-nogui-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lcl-gtk2-1.8.\n", - "Preparing to unpack .../161-lcl-gtk2-1.8_1.8.2+dfsg-3_amd64.deb ...\n", - "Unpacking lcl-gtk2-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lcl-units-1.8.\n", - "Preparing to unpack .../162-lcl-units-1.8_1.8.2+dfsg-3_amd64.deb ...\n", - "Unpacking lcl-units-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lcl-utils-1.8.\n", - "Preparing to unpack .../163-lcl-utils-1.8_1.8.2+dfsg-3_amd64.deb ...\n", - "Unpacking lcl-utils-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lcl-1.8.\n", - "Preparing to unpack .../164-lcl-1.8_1.8.2+dfsg-3_amd64.deb ...\n", - "Unpacking lcl-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lazarus-1.8.\n", - "Preparing to unpack .../165-lazarus-1.8_1.8.2+dfsg-3_all.deb ...\n", - "Unpacking lazarus-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package lazarus.\n", - "Preparing to unpack .../166-lazarus_1.8.2+dfsg-3_all.deb ...\n", - "Unpacking lazarus (1.8.2+dfsg-3) 
...\n", - "Selecting previously unselected package lazarus-doc-1.8.\n", - "Preparing to unpack .../167-lazarus-doc-1.8_1.8.2+dfsg-3_all.deb ...\n", - "Unpacking lazarus-doc-1.8 (1.8.2+dfsg-3) ...\n", - "Selecting previously unselected package liba52-0.7.4:amd64.\n", - "Preparing to unpack .../168-liba52-0.7.4_0.7.4-19_amd64.deb ...\n", - "Unpacking liba52-0.7.4:amd64 (0.7.4-19) ...\n", - "Selecting previously unselected package liba52-0.7.4-dev.\n", - "Preparing to unpack .../169-liba52-0.7.4-dev_0.7.4-19_amd64.deb ...\n", - "Unpacking liba52-0.7.4-dev (0.7.4-19) ...\n", - "Selecting previously unselected package libapr1:amd64.\n", - "Preparing to unpack .../170-libapr1_1.6.3-2_amd64.deb ...\n", - "Unpacking libapr1:amd64 (1.6.3-2) ...\n", - "Selecting previously unselected package libaprutil1:amd64.\n", - "Preparing to unpack .../171-libaprutil1_1.6.1-2_amd64.deb ...\n", - "Unpacking libaprutil1:amd64 (1.6.1-2) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Selecting previously unselected package libarchive-cpio-perl.\n", - "Preparing to unpack .../172-libarchive-cpio-perl_0.10-1_all.deb ...\n", - "Unpacking libarchive-cpio-perl (0.10-1) ...\n", - "Selecting previously unselected package libasound2-data.\n", - "Preparing to unpack .../173-libasound2-data_1.1.3-5ubuntu0.4_all.deb ...\n", - "Unpacking libasound2-data (1.1.3-5ubuntu0.4) ...\n", - "Selecting previously unselected package libasound2:amd64.\n", - "Preparing to unpack .../174-libasound2_1.1.3-5ubuntu0.4_amd64.deb ...\n", - "Unpacking libasound2:amd64 (1.1.3-5ubuntu0.4) ...\n", - "Selecting previously unselected package libasound2-dev:amd64.\n", - "Preparing to unpack .../175-libasound2-dev_1.1.3-5ubuntu0.4_amd64.deb ...\n", - "Unpacking libasound2-dev:amd64 (1.1.3-5ubuntu0.4) ...\n", - "Selecting previously unselected package libasyncns0:amd64.\n", - "Preparing to unpack .../176-libasyncns0_0.8-6_amd64.deb ...\n", - "Unpacking libasyncns0:amd64 (0.8-6) ...\n", - 
"Selecting previously unselected package libcaca0:amd64.\n", - "Preparing to unpack .../177-libcaca0_0.99.beta19-2ubuntu0.18.04.1_amd64.deb ...\n", - "Unpacking libcaca0:amd64 (0.99.beta19-2ubuntu0.18.04.1) ...\n", - "Selecting previously unselected package libslang2-dev:amd64.\n", - "Preparing to unpack .../178-libslang2-dev_2.3.1a-3ubuntu1_amd64.deb ...\n", - "Unpacking libslang2-dev:amd64 (2.3.1a-3ubuntu1) ...\n", - "Selecting previously unselected package libcaca-dev.\n", - "Preparing to unpack .../179-libcaca-dev_0.99.beta19-2ubuntu0.18.04.1_amd64.deb ...\n", - "Unpacking libcaca-dev (0.99.beta19-2ubuntu0.18.04.1) ...\n", - "Selecting previously unselected package libcdt5.\n", - "Preparing to unpack .../180-libcdt5_2.40.1-2_amd64.deb ...\n", - "Unpacking libcdt5 (2.40.1-2) ...\n", - "Selecting previously unselected package libcgraph6.\n", - "Preparing to unpack .../181-libcgraph6_2.40.1-2_amd64.deb ...\n", - "Unpacking libcgraph6 (2.40.1-2) ...\n", - "Selecting previously unselected package libdrm-amdgpu1:amd64.\n", - "Preparing to unpack .../182-libdrm-amdgpu1_2.4.99-1ubuntu1~18.04.2_amd64.deb ...\n", - "Unpacking libdrm-amdgpu1:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Selecting previously unselected package libpciaccess0:amd64.\n", - "Preparing to unpack .../183-libpciaccess0_0.14-1_amd64.deb ...\n", - "Unpacking libpciaccess0:amd64 (0.14-1) ...\n", - "Selecting previously unselected package libdrm-intel1:amd64.\n", - "Preparing to unpack .../184-libdrm-intel1_2.4.99-1ubuntu1~18.04.2_amd64.deb ...\n", - "Unpacking libdrm-intel1:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Selecting previously unselected package libdrm-radeon1:amd64.\n", - "Preparing to unpack .../185-libdrm-radeon1_2.4.99-1ubuntu1~18.04.2_amd64.deb ...\n", - "Unpacking libdrm-radeon1:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Selecting previously unselected package libdrm-nouveau2:amd64.\n", - "Preparing to unpack .../186-libdrm-nouveau2_2.4.99-1ubuntu1~18.04.2_amd64.deb ...\n", - "Unpacking 
libdrm-nouveau2:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Selecting previously unselected package libdrm-dev:amd64.\n", - "Preparing to unpack .../187-libdrm-dev_2.4.99-1ubuntu1~18.04.2_amd64.deb ...\n", - "Unpacking libdrm-dev:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Selecting previously unselected package libwayland-server0:amd64.\n", - "Preparing to unpack .../188-libwayland-server0_1.16.0-1ubuntu1.1~18.04.3_amd64.deb ...\n", - "Unpacking libwayland-server0:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Selecting previously unselected package libgbm1:amd64.\n", - "Preparing to unpack .../189-libgbm1_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking libgbm1:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected package libglapi-mesa:amd64.\n", - "Preparing to unpack .../190-libglapi-mesa_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking libglapi-mesa:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected package libwayland-client0:amd64.\n", - "Preparing to unpack .../191-libwayland-client0_1.16.0-1ubuntu1.1~18.04.3_amd64.deb ...\n", - "Unpacking libwayland-client0:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Selecting previously unselected package libx11-xcb1:amd64.\n", - "Preparing to unpack .../192-libx11-xcb1_2%3a1.6.4-3ubuntu0.2_amd64.deb ...\n", - "Unpacking libx11-xcb1:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - "Selecting previously unselected package libxcb-dri2-0:amd64.\n", - "Preparing to unpack .../193-libxcb-dri2-0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-dri2-0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-dri3-0:amd64.\n", - "Preparing to unpack .../194-libxcb-dri3-0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-dri3-0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-present0:amd64.\n", - "Preparing to unpack .../195-libxcb-present0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-present0:amd64 
(1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-sync1:amd64.\n", - "Preparing to unpack .../196-libxcb-sync1_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-sync1:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-xfixes0:amd64.\n", - "Preparing to unpack .../197-libxcb-xfixes0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-xfixes0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxshmfence1:amd64.\n", - "Preparing to unpack .../198-libxshmfence1_1.3-1_amd64.deb ...\n", - "Unpacking libxshmfence1:amd64 (1.3-1) ...\n", - "Selecting previously unselected package libegl-mesa0:amd64.\n", - "Preparing to unpack .../199-libegl-mesa0_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking libegl-mesa0:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected package libflac8:amd64.\n", - "Preparing to unpack .../200-libflac8_1.3.2-1_amd64.deb ...\n", - "Unpacking libflac8:amd64 (1.3.2-1) ...\n", - "Selecting previously unselected package libogg-dev:amd64.\n", - "Preparing to unpack .../201-libogg-dev_1.3.2-1_amd64.deb ...\n", - "Unpacking libogg-dev:amd64 (1.3.2-1) ...\n", - "Selecting previously unselected package libflac-dev:amd64.\n", - "Preparing to unpack .../202-libflac-dev_1.3.2-1_amd64.deb ...\n", - "Unpacking libflac-dev:amd64 (1.3.2-1) ...\n", - "Selecting previously unselected package libsamplerate0:amd64.\n", - "Preparing to unpack .../203-libsamplerate0_0.1.9-1_amd64.deb ...\n", - "Unpacking libsamplerate0:amd64 (0.1.9-1) ...\n", - "Selecting previously unselected package libjack-jackd2-0:amd64.\n", - "Preparing to unpack .../204-libjack-jackd2-0_1.9.12~dfsg-2_amd64.deb ...\n", - "Unpacking libjack-jackd2-0:amd64 (1.9.12~dfsg-2) ...\n", - "Selecting previously unselected package libvorbis0a:amd64.\n", - "Preparing to unpack .../205-libvorbis0a_1.3.5-4.2_amd64.deb ...\n", - "Unpacking libvorbis0a:amd64 (1.3.5-4.2) ...\n", - 
"Selecting previously unselected package libvorbisenc2:amd64.\n", - "Preparing to unpack .../206-libvorbisenc2_1.3.5-4.2_amd64.deb ...\n", - "Unpacking libvorbisenc2:amd64 (1.3.5-4.2) ...\n", - "Selecting previously unselected package libsndfile1:amd64.\n", - "Preparing to unpack .../207-libsndfile1_1.0.28-4ubuntu0.18.04.1_amd64.deb ...\n", - "Unpacking libsndfile1:amd64 (1.0.28-4ubuntu0.18.04.1) ...\n", - "Selecting previously unselected package libpulse0:amd64.\n", - "Preparing to unpack .../208-libpulse0_1%3a11.1-1ubuntu7.4_amd64.deb ...\n", - "Unpacking libpulse0:amd64 (1:11.1-1ubuntu7.4) ...\n", - "Selecting previously unselected package libfluidsynth1:amd64.\n", - "Preparing to unpack .../209-libfluidsynth1_1.1.9-1_amd64.deb ...\n", - "Unpacking libfluidsynth1:amd64 (1.1.9-1) ...\n", - "Selecting previously unselected package libxpm4:amd64.\n", - "Preparing to unpack .../210-libxpm4_1%3a3.5.12-1_amd64.deb ...\n", - "Unpacking libxpm4:amd64 (1:3.5.12-1) ...\n", - "Selecting previously unselected package libforms2.\n", - "Preparing to unpack .../211-libforms2_1.2.3-1.3_amd64.deb ...\n", - "Unpacking libforms2 (1.2.3-1.3) ...\n", - "Selecting previously unselected package libxpm-dev:amd64.\n", - "Preparing to unpack .../212-libxpm-dev_1%3a3.5.12-1_amd64.deb ...\n", - "Unpacking libxpm-dev:amd64 (1:3.5.12-1) ...\n", - "Selecting previously unselected package libforms-dev.\n", - "Preparing to unpack .../213-libforms-dev_1.2.3-1.3_amd64.deb ...\n", - "Unpacking libforms-dev (1.2.3-1.3) ...\n", - "Selecting previously unselected package libgail18:amd64.\n", - "Preparing to unpack .../214-libgail18_2.24.32-1ubuntu1_amd64.deb ...\n", - "Unpacking libgail18:amd64 (2.24.32-1ubuntu1) ...\n", - "Selecting previously unselected package libgail-common:amd64.\n", - "Preparing to unpack .../215-libgail-common_2.24.32-1ubuntu1_amd64.deb ...\n", - "Unpacking libgail-common:amd64 (2.24.32-1ubuntu1) ...\n", - "Selecting previously unselected package libwebp6:amd64.\n", - 
"Preparing to unpack .../216-libwebp6_0.6.1-2_amd64.deb ...\n", - "Unpacking libwebp6:amd64 (0.6.1-2) ...\n", - "Selecting previously unselected package libgd3:amd64.\n", - "Preparing to unpack .../217-libgd3_2.2.5-4ubuntu0.4_amd64.deb ...\n", - "Unpacking libgd3:amd64 (2.2.5-4ubuntu0.4) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Selecting previously unselected package libxt6:amd64.\n", - "Preparing to unpack .../218-libxt6_1%3a1.1.5-1_amd64.deb ...\n", - "Unpacking libxt6:amd64 (1:1.1.5-1) ...\n", - "Selecting previously unselected package libxt-dev:amd64.\n", - "Preparing to unpack .../219-libxt-dev_1%3a1.1.5-1_amd64.deb ...\n", - "Unpacking libxt-dev:amd64 (1:1.1.5-1) ...\n", - "Selecting previously unselected package libvpx5:amd64.\n", - "Preparing to unpack .../220-libvpx5_1.7.0-3ubuntu0.18.04.1_amd64.deb ...\n", - "Unpacking libvpx5:amd64 (1.7.0-3ubuntu0.18.04.1) ...\n", - "Selecting previously unselected package libvpx-dev:amd64.\n", - "Preparing to unpack .../221-libvpx-dev_1.7.0-3ubuntu0.18.04.1_amd64.deb ...\n", - "Unpacking libvpx-dev:amd64 (1.7.0-3ubuntu0.18.04.1) ...\n", - "Selecting previously unselected package libjbig-dev:amd64.\n", - "Preparing to unpack .../222-libjbig-dev_2.1-3.1build1_amd64.deb ...\n", - "Unpacking libjbig-dev:amd64 (2.1-3.1build1) ...\n", - "Selecting previously unselected package liblzma-dev:amd64.\n", - "Preparing to unpack .../223-liblzma-dev_5.2.2-1.3_amd64.deb ...\n", - "Unpacking liblzma-dev:amd64 (5.2.2-1.3) ...\n", - "Selecting previously unselected package libtiffxx5:amd64.\n", - "Preparing to unpack .../224-libtiffxx5_4.0.9-5ubuntu0.3_amd64.deb ...\n", - "Unpacking libtiffxx5:amd64 (4.0.9-5ubuntu0.3) ...\n", - "Selecting previously unselected package libtiff5-dev:amd64.\n", - "Preparing to unpack .../225-libtiff5-dev_4.0.9-5ubuntu0.3_amd64.deb ...\n", - "Unpacking libtiff5-dev:amd64 (4.0.9-5ubuntu0.3) ...\n", - "Selecting previously unselected package libtiff-dev.\n", - 
"Preparing to unpack .../226-libtiff-dev_4.0.9-5ubuntu0.3_amd64.deb ...\n", - "Unpacking libtiff-dev (4.0.9-5ubuntu0.3) ...\n", - "Selecting previously unselected package libgd-dev:amd64.\n", - "Preparing to unpack .../227-libgd-dev_2.2.5-4ubuntu0.4_amd64.deb ...\n", - "Unpacking libgd-dev:amd64 (2.2.5-4ubuntu0.4) ...\n", - "Selecting previously unselected package libgdk-pixbuf2.0-bin.\n", - "Preparing to unpack .../228-libgdk-pixbuf2.0-bin_2.36.11-2_amd64.deb ...\n", - "Unpacking libgdk-pixbuf2.0-bin (2.36.11-2) ...\n", - "Selecting previously unselected package libllvm9:amd64.\n", - "Preparing to unpack .../229-libllvm9_1%3a9-2~ubuntu18.04.2_amd64.deb ...\n", - "Unpacking libllvm9:amd64 (1:9-2~ubuntu18.04.2) ...\n", - "Selecting previously unselected package libsensors4:amd64.\n", - "Preparing to unpack .../230-libsensors4_1%3a3.4.0-4_amd64.deb ...\n", - "Unpacking libsensors4:amd64 (1:3.4.0-4) ...\n", - "Selecting previously unselected package libgl1-mesa-dri:amd64.\n", - "Preparing to unpack .../231-libgl1-mesa-dri_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking libgl1-mesa-dri:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected package libglvnd0:amd64.\n", - "Preparing to unpack .../232-libglvnd0_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libglvnd0:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libgles1:amd64.\n", - "Preparing to unpack .../233-libgles1_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libgles1:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libxcb-glx0:amd64.\n", - "Preparing to unpack .../234-libxcb-glx0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-glx0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libglx-mesa0:amd64.\n", - "Preparing to unpack .../235-libglx-mesa0_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking libglx-mesa0:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected 
package libglx0:amd64.\n", - "Preparing to unpack .../236-libglx0_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libglx0:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libgl1:amd64.\n", - "Preparing to unpack .../237-libgl1_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libgl1:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libglu1-mesa:amd64.\n", - "Preparing to unpack .../238-libglu1-mesa_9.0.0-2.1build1_amd64.deb ...\n", - "Unpacking libglu1-mesa:amd64 (9.0.0-2.1build1) ...\n", - "Selecting previously unselected package mesa-common-dev:amd64.\n", - "Preparing to unpack .../239-mesa-common-dev_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking mesa-common-dev:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected package libglvnd-core-dev:amd64.\n", - "Preparing to unpack .../240-libglvnd-core-dev_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libglvnd-core-dev:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libegl1:amd64.\n", - "Preparing to unpack .../241-libegl1_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libegl1:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libgles2:amd64.\n", - "Preparing to unpack .../242-libgles2_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libgles2:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libopengl0:amd64.\n", - "Preparing to unpack .../243-libopengl0_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libopengl0:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libglvnd-dev:amd64.\n", - "Preparing to unpack .../244-libglvnd-dev_1.0.0-2ubuntu2.3_amd64.deb ...\n", - "Unpacking libglvnd-dev:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Selecting previously unselected package libx11-xcb-dev:amd64.\n", - "Preparing to unpack .../245-libx11-xcb-dev_2%3a1.6.4-3ubuntu0.2_amd64.deb ...\n", - "Unpacking libx11-xcb-dev:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - 
"Selecting previously unselected package libxcb-dri3-dev:amd64.\n", - "Preparing to unpack .../246-libxcb-dri3-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-dri3-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-randr0:amd64.\n", - "Preparing to unpack .../247-libxcb-randr0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-randr0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-randr0-dev:amd64.\n", - "Preparing to unpack .../248-libxcb-randr0-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-randr0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-shape0:amd64.\n", - "Preparing to unpack .../249-libxcb-shape0_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-shape0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-shape0-dev:amd64.\n", - "Preparing to unpack .../250-libxcb-shape0-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-shape0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-xfixes0-dev:amd64.\n", - "Preparing to unpack .../251-libxcb-xfixes0-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-xfixes0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-sync-dev:amd64.\n", - "Preparing to unpack .../252-libxcb-sync-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-sync-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-present-dev:amd64.\n", - "Preparing to unpack .../253-libxcb-present-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-present-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxshmfence-dev:amd64.\n", - "Preparing to unpack .../254-libxshmfence-dev_1.3-1_amd64.deb ...\n", - "Unpacking libxshmfence-dev:amd64 (1.3-1) ...\n", - "Selecting previously unselected package 
libxcb-dri2-0-dev:amd64.\n", - "Preparing to unpack .../255-libxcb-dri2-0-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-dri2-0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package libxcb-glx0-dev:amd64.\n", - "Preparing to unpack .../256-libxcb-glx0-dev_1.13-2~ubuntu18.04_amd64.deb ...\n", - "Unpacking libxcb-glx0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Selecting previously unselected package x11proto-xf86vidmode-dev.\n", - "Preparing to unpack .../257-x11proto-xf86vidmode-dev_2018.4-4_all.deb ...\n", - "Unpacking x11proto-xf86vidmode-dev (2018.4-4) ...\n", - "Selecting previously unselected package libxxf86vm-dev:amd64.\n", - "Preparing to unpack .../258-libxxf86vm-dev_1%3a1.1.4-1_amd64.deb ...\n", - "Unpacking libxxf86vm-dev:amd64 (1:1.1.4-1) ...\n", - "Selecting previously unselected package libgl1-mesa-dev:amd64.\n", - "Preparing to unpack .../259-libgl1-mesa-dev_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking libgl1-mesa-dev:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected package libglu1-mesa-dev:amd64.\n", - "Preparing to unpack .../260-libglu1-mesa-dev_9.0.0-2.1build1_amd64.deb ...\n", - "Unpacking libglu1-mesa-dev:amd64 (9.0.0-2.1build1) ...\n", - "Selecting previously unselected package libgmpxx4ldbl:amd64.\n", - "Preparing to unpack .../261-libgmpxx4ldbl_2%3a6.1.2+dfsg-2_amd64.deb ...\n", - "Unpacking libgmpxx4ldbl:amd64 (2:6.1.2+dfsg-2) ...\n", - "Selecting previously unselected package libgmp-dev:amd64.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Preparing to unpack .../262-libgmp-dev_2%3a6.1.2+dfsg-2_amd64.deb ...\n", - "Unpacking libgmp-dev:amd64 (2:6.1.2+dfsg-2) ...\n", - "Selecting previously unselected package libgts-0.7-5:amd64.\n", - "Preparing to unpack .../263-libgts-0.7-5_0.7.6+darcs121130-4_amd64.deb ...\n", - "Unpacking libgts-0.7-5:amd64 (0.7.6+darcs121130-4) ...\n", - "Selecting previously unselected package libltdl7:amd64.\n", 
- "Preparing to unpack .../264-libltdl7_2.4.6-2_amd64.deb ...\n", - "Unpacking libltdl7:amd64 (2.4.6-2) ...\n", - "Selecting previously unselected package libpathplan4.\n", - "Preparing to unpack .../265-libpathplan4_2.40.1-2_amd64.deb ...\n", - "Unpacking libpathplan4 (2.40.1-2) ...\n", - "Selecting previously unselected package libgvc6.\n", - "Preparing to unpack .../266-libgvc6_2.40.1-2_amd64.deb ...\n", - "Unpacking libgvc6 (2.40.1-2) ...\n", - "Selecting previously unselected package libgvpr2.\n", - "Preparing to unpack .../267-libgvpr2_2.40.1-2_amd64.deb ...\n", - "Unpacking libgvpr2 (2.40.1-2) ...\n", - "Selecting previously unselected package libxdot4.\n", - "Preparing to unpack .../268-libxdot4_2.40.1-2_amd64.deb ...\n", - "Unpacking libxdot4 (2.40.1-2) ...\n", - "Selecting previously unselected package liblab-gamut1.\n", - "Preparing to unpack .../269-liblab-gamut1_2.40.1-2_amd64.deb ...\n", - "Unpacking liblab-gamut1 (2.40.1-2) ...\n", - "Selecting previously unselected package libgvc6-plugins-gtk.\n", - "Preparing to unpack .../270-libgvc6-plugins-gtk_2.40.1-2_amd64.deb ...\n", - "Unpacking libgvc6-plugins-gtk (2.40.1-2) ...\n", - "Selecting previously unselected package libgraphviz-dev.\n", - "Preparing to unpack .../271-libgraphviz-dev_2.40.1-2_amd64.deb ...\n", - "Unpacking libgraphviz-dev (2.40.1-2) ...\n", - "Selecting previously unselected package libgtk2.0-bin.\n", - "Preparing to unpack .../272-libgtk2.0-bin_2.24.32-1ubuntu1_amd64.deb ...\n", - "Unpacking libgtk2.0-bin (2.24.32-1ubuntu1) ...\n", - "Selecting previously unselected package libgts-bin.\n", - "Preparing to unpack .../273-libgts-bin_0.7.6+darcs121130-4_amd64.deb ...\n", - "Unpacking libgts-bin (0.7.6+darcs121130-4) ...\n", - "Selecting previously unselected package libltdl-dev:amd64.\n", - "Preparing to unpack .../274-libltdl-dev_2.4.6-2_amd64.deb ...\n", - "Unpacking libltdl-dev:amd64 (2.4.6-2) ...\n", - "Selecting previously unselected package libmad0:amd64.\n", - "Preparing to 
unpack .../275-libmad0_0.15.1b-9ubuntu18.04.1_amd64.deb ...\n", - "Unpacking libmad0:amd64 (0.15.1b-9ubuntu18.04.1) ...\n", - "Selecting previously unselected package libmad0-dev.\n", - "Preparing to unpack .../276-libmad0-dev_0.15.1b-9ubuntu18.04.1_amd64.deb ...\n", - "Unpacking libmad0-dev (0.15.1b-9ubuntu18.04.1) ...\n", - "Selecting previously unselected package libsys-hostname-long-perl.\n", - "Preparing to unpack .../277-libsys-hostname-long-perl_1.5-1_all.deb ...\n", - "Unpacking libsys-hostname-long-perl (1.5-1) ...\n", - "Selecting previously unselected package libmail-sendmail-perl.\n", - "Preparing to unpack .../278-libmail-sendmail-perl_0.80-1_all.deb ...\n", - "Unpacking libmail-sendmail-perl (0.80-1) ...\n", - "Selecting previously unselected package libmikmod-config.\n", - "Preparing to unpack .../279-libmikmod-config_3.3.11.1-3_amd64.deb ...\n", - "Unpacking libmikmod-config (3.3.11.1-3) ...\n", - "Selecting previously unselected package libopenal-data.\n", - "Preparing to unpack .../280-libopenal-data_1%3a1.18.2-2_all.deb ...\n", - "Unpacking libopenal-data (1:1.18.2-2) ...\n", - "Selecting previously unselected package libsndio6.1:amd64.\n", - "Preparing to unpack .../281-libsndio6.1_1.1.0-3_amd64.deb ...\n", - "Unpacking libsndio6.1:amd64 (1.1.0-3) ...\n", - "Selecting previously unselected package libopenal1:amd64.\n", - "Preparing to unpack .../282-libopenal1_1%3a1.18.2-2_amd64.deb ...\n", - "Unpacking libopenal1:amd64 (1:1.18.2-2) ...\n", - "Selecting previously unselected package libwayland-cursor0:amd64.\n", - "Preparing to unpack .../283-libwayland-cursor0_1.16.0-1ubuntu1.1~18.04.3_amd64.deb ...\n", - "Unpacking libwayland-cursor0:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Selecting previously unselected package libwayland-egl1:amd64.\n", - "Preparing to unpack .../284-libwayland-egl1_1.16.0-1ubuntu1.1~18.04.3_amd64.deb ...\n", - "Unpacking libwayland-egl1:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Selecting previously unselected 
package libwayland-egl1-mesa:amd64.\n", - "Preparing to unpack .../285-libwayland-egl1-mesa_19.2.8-0ubuntu0~18.04.3_amd64.deb ...\n", - "Unpacking libwayland-egl1-mesa:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Selecting previously unselected package libxkbcommon0:amd64.\n", - "Preparing to unpack .../286-libxkbcommon0_0.8.2-1~ubuntu18.04.1_amd64.deb ...\n", - "Unpacking libxkbcommon0:amd64 (0.8.2-1~ubuntu18.04.1) ...\n", - "Selecting previously unselected package libsdl2-2.0-0:amd64.\n", - "Preparing to unpack .../287-libsdl2-2.0-0_2.0.8+dfsg1-1ubuntu1.18.04.4_amd64.deb ...\n", - "Unpacking libsdl2-2.0-0:amd64 (2.0.8+dfsg1-1ubuntu1.18.04.4) ...\n", - "Selecting previously unselected package libmikmod3:amd64.\n", - "Preparing to unpack .../288-libmikmod3_3.3.11.1-3_amd64.deb ...\n", - "Unpacking libmikmod3:amd64 (3.3.11.1-3) ...\n", - "Selecting previously unselected package libmikmod-dev:amd64.\n", - "Preparing to unpack .../289-libmikmod-dev_3.3.11.1-3_amd64.deb ...\n", - "Unpacking libmikmod-dev:amd64 (3.3.11.1-3) ...\n", - "Selecting previously unselected package libmodplug1:amd64.\n", - "Preparing to unpack .../290-libmodplug1_1%3a0.8.9.0-1_amd64.deb ...\n", - "Unpacking libmodplug1:amd64 (1:0.8.9.0-1) ...\n", - "Selecting previously unselected package libmodplug-dev:amd64.\n", - "Preparing to unpack .../291-libmodplug-dev_1%3a0.8.9.0-1_amd64.deb ...\n", - "Unpacking libmodplug-dev:amd64 (1:0.8.9.0-1) ...\n", - "Selecting previously unselected package libproxy1v5:amd64.\n", - "Preparing to unpack .../292-libproxy1v5_0.4.15-1_amd64.deb ...\n", - "Unpacking libproxy1v5:amd64 (0.4.15-1) ...\n", - "Selecting previously unselected package libproxy-tools.\n", - "Preparing to unpack .../293-libproxy-tools_0.4.15-1_amd64.deb ...\n", - "Unpacking libproxy-tools (0.4.15-1) ...\n", - "Selecting previously unselected package libpulse-mainloop-glib0:amd64.\n", - "Preparing to unpack .../294-libpulse-mainloop-glib0_1%3a11.1-1ubuntu7.4_amd64.deb ...\n", - "Unpacking 
libpulse-mainloop-glib0:amd64 (1:11.1-1ubuntu7.4) ...\n", - "Selecting previously unselected package libpulse-dev:amd64.\n", - "Preparing to unpack .../295-libpulse-dev_1%3a11.1-1ubuntu7.4_amd64.deb ...\n", - "Unpacking libpulse-dev:amd64 (1:11.1-1ubuntu7.4) ...\n", - "Selecting previously unselected package libsdl1.2debian:amd64.\n", - "Preparing to unpack .../296-libsdl1.2debian_1.2.15+dfsg2-0.1ubuntu0.1_amd64.deb ...\n", - "Unpacking libsdl1.2debian:amd64 (1.2.15+dfsg2-0.1ubuntu0.1) ...\n", - "Selecting previously unselected package libvorbisfile3:amd64.\n", - "Preparing to unpack .../297-libvorbisfile3_1.3.5-4.2_amd64.deb ...\n", - "Unpacking libvorbisfile3:amd64 (1.3.5-4.2) ...\n", - "Selecting previously unselected package libsdl-mixer1.2:amd64.\n", - "Preparing to unpack .../298-libsdl-mixer1.2_1.2.12-14_amd64.deb ...\n", - "Unpacking libsdl-mixer1.2:amd64 (1.2.12-14) ...\n", - "Selecting previously unselected package libsdl1.2-dev.\n", - "Preparing to unpack .../299-libsdl1.2-dev_1.2.15+dfsg2-0.1ubuntu0.1_amd64.deb ...\n", - "Unpacking libsdl1.2-dev (1.2.15+dfsg2-0.1ubuntu0.1) ...\n", - "Selecting previously unselected package libvorbis-dev:amd64.\n", - "Preparing to unpack .../300-libvorbis-dev_1.3.5-4.2_amd64.deb ...\n", - "Unpacking libvorbis-dev:amd64 (1.3.5-4.2) ...\n", - "Selecting previously unselected package libsdl-mixer1.2-dev:amd64.\n", - "Preparing to unpack .../301-libsdl-mixer1.2-dev_1.2.12-14_amd64.deb ...\n", - "Unpacking libsdl-mixer1.2-dev:amd64 (1.2.12-14) ...\n", - "Selecting previously unselected package libserf-1-1:amd64.\n", - "Preparing to unpack .../302-libserf-1-1_1.3.9-6_amd64.deb ...\n", - "Unpacking libserf-1-1:amd64 (1.3.9-6) ...\n", - "Selecting previously unselected package libsvn1:amd64.\n", - "Preparing to unpack .../303-libsvn1_1.9.7-4ubuntu1_amd64.deb ...\n", - "Unpacking libsvn1:amd64 (1.9.7-4ubuntu1) ...\n", - "Selecting previously unselected package libvlccore9:amd64.\n", - "Preparing to unpack 
.../304-libvlccore9_3.0.8-0ubuntu18.04.1_amd64.deb ...\n", - "Unpacking libvlccore9:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Selecting previously unselected package libvlc5:amd64.\n", - "Preparing to unpack .../305-libvlc5_3.0.8-0ubuntu18.04.1_amd64.deb ...\n", - "Unpacking libvlc5:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Selecting previously unselected package libvlc-bin:amd64.\n", - "Preparing to unpack .../306-libvlc-bin_3.0.8-0ubuntu18.04.1_amd64.deb ...\n", - "Unpacking libvlc-bin:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Selecting previously unselected package libvlc-dev:amd64.\n", - "Preparing to unpack .../307-libvlc-dev_3.0.8-0ubuntu18.04.1_amd64.deb ...\n", - "Unpacking libvlc-dev:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Selecting previously unselected package libx11-doc.\n", - "Preparing to unpack .../308-libx11-doc_2%3a1.6.4-3ubuntu0.2_all.deb ...\n", - "Unpacking libx11-doc (2:1.6.4-3ubuntu0.2) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Selecting previously unselected package x11proto-xf86dga-dev.\n", - "Preparing to unpack .../309-x11proto-xf86dga-dev_2018.4-4_all.deb ...\n", - "Unpacking x11proto-xf86dga-dev (2018.4-4) ...\n", - "Selecting previously unselected package libxxf86dga-dev:amd64.\n", - "Preparing to unpack .../310-libxxf86dga-dev_2%3a1.1.4-1_amd64.deb ...\n", - "Unpacking libxxf86dga-dev:amd64 (2:1.1.4-1) ...\n", - "Selecting previously unselected package subversion.\n", - "Preparing to unpack .../311-subversion_1.9.7-4ubuntu1_amd64.deb ...\n", - "Unpacking subversion (1.9.7-4ubuntu1) ...\n", - "Selecting previously unselected package timgm6mb-soundfont.\n", - "Preparing to unpack .../312-timgm6mb-soundfont_1.3-2_all.deb ...\n", - "Unpacking timgm6mb-soundfont (1.3-2) ...\n", - "Selecting previously unselected package zip.\n", - "Preparing to unpack .../313-zip_3.0-11build1_amd64.deb ...\n", - "Unpacking zip (3.0-11build1) ...\n", - "Selecting previously unselected package libc6-dbg:amd64.\n", - 
"Preparing to unpack .../314-libc6-dbg_2.27-3ubuntu1_amd64.deb ...\n", - "Unpacking libc6-dbg:amd64 (2.27-3ubuntu1) ...\n", - "Selecting previously unselected package libdca0:amd64.\n", - "Preparing to unpack .../315-libdca0_0.0.5-10_amd64.deb ...\n", - "Unpacking libdca0:amd64 (0.0.5-10) ...\n", - "Selecting previously unselected package libdca-dev:amd64.\n", - "Preparing to unpack .../316-libdca-dev_0.0.5-10_amd64.deb ...\n", - "Unpacking libdca-dev:amd64 (0.0.5-10) ...\n", - "Selecting previously unselected package libdts-dev:amd64.\n", - "Preparing to unpack .../317-libdts-dev_0.0.5-10_amd64.deb ...\n", - "Unpacking libdts-dev:amd64 (0.0.5-10) ...\n", - "Setting up libapr1:amd64 (1.6.3-2) ...\n", - "Setting up libglvnd0:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up libopengl0:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up libpathplan4 (2.40.1-2) ...\n", - "Setting up libgles1:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up libicu60:amd64 (60.2-3ubuntu3.1) ...\n", - "Setting up liblab-gamut1 (2.40.1-2) ...\n", - "Setting up libgtk2.0-common (2.24.32-1ubuntu1) ...\n", - "Setting up libxdot4 (2.40.1-2) ...\n", - "Setting up libc6-dbg:amd64 (2.27-3ubuntu1) ...\n", - "Setting up libasyncns0:amd64 (0.8-6) ...\n", - "Setting up libarchive-zip-perl (1.60-1ubuntu0.1) ...\n", - "Setting up libmodplug1:amd64 (1:0.8.9.0-1) ...\n", - "Setting up libtimedate-perl (2.3000-2) ...\n", - "Setting up libjbig0:amd64 (2.1-3.1build1) ...\n", - "Setting up libsigsegv2:amd64 (2.12-1) ...\n", - "Setting up libpthread-stubs0-dev:amd64 (0.3-4) ...\n", - "Setting up fonts-dejavu-core (2.37-1) ...\n", - "Setting up gir1.2-freedesktop:amd64 (1.56.1-1) ...\n", - "Setting up libelf1:amd64 (0.170-0.4ubuntu0.1) ...\n", - "Setting up groff-base (1.22.3-10) ...\n", - "Setting up libglib2.0-0:amd64 (2.56.4-0ubuntu0.18.04.6) ...\n", - "No schema files found: doing nothing.\n", - "Setting up libasound2-data (1.1.3-5ubuntu0.4) ...\n", - "Setting up libxshmfence1:amd64 (1.3-1) ...\n", - "Setting up 
xorg-sgml-doctools (1:1.11-1) ...\n", - "Setting up libwayland-client0:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Setting up xkb-data (2.23.1-1ubuntu1.18.04.1) ...\n", - "Setting up libproxy1v5:amd64 (0.4.15-1) ...\n", - "Setting up libarchive-cpio-perl (0.10-1) ...\n", - "Setting up libgdk-pixbuf2.0-common (2.36.11-2) ...\n", - "Setting up libdatrie1:amd64 (0.2.10-7) ...\n", - "Setting up libtiff5:amd64 (4.0.9-5ubuntu0.3) ...\n", - "Setting up gettext-base (0.19.8.1-6ubuntu0.3) ...\n", - "Setting up libpipeline1:amd64 (1.5.0-1) ...\n", - "Setting up libglapi-mesa:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up m4 (1.4.18-1) ...\n", - "Setting up fp-docs-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libxml2:amd64 (2.9.4+dfsg1-6.1ubuntu1.3) ...\n", - "Setting up zip (3.0-11build1) ...\n", - "Setting up x11proto-dev (2018.4-4) ...\n", - "Setting up libmagic-mgc (1:5.32-2ubuntu0.3) ...\n", - "Setting up libasound2:amd64 (1.1.3-5ubuntu0.4) ...\n", - "Setting up libopenal-data (1:1.18.2-2) ...\n", - "Setting up libmagic1:amd64 (1:5.32-2ubuntu0.3) ...\n", - "Setting up libdrm-common (2.4.99-1ubuntu1~18.04.2) ...\n", - "Setting up libgraphite2-3:amd64 (1.3.11-2) ...\n", - "Setting up libjbig-dev:amd64 (2.1-3.1build1) ...\n", - "Setting up libcroco3:amd64 (0.6.12-2) ...\n", - "Setting up libogg0:amd64 (1.3.2-1) ...\n", - "Setting up libsys-hostname-long-perl (1.5-1) ...\n", - "Setting up libatk1.0-data (2.28.1-1) ...\n", - "Setting up liba52-0.7.4:amd64 (0.7.4-19) ...\n", - "Setting up x11proto-damage-dev (1:2018.4-4) ...\n", - "Setting up libx11-xcb1:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - "Setting up libpixman-1-0:amd64 (0.34.0-2) ...\n", - "Setting up xtrans-dev (1.3.5-1) ...\n", - "Setting up x11proto-xext-dev (2018.4-4) ...\n", - "Setting up libglib2.0-data (2.56.4-0ubuntu0.18.04.6) ...\n", - "Setting up fp-units-rtl-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libmail-sendmail-perl (0.80-1) ...\n", - "Setting up lazarus-src-1.8 (1.8.2+dfsg-3) 
...\n", - "Setting up x11proto-xinerama-dev (2018.4-4) ...\n", - "Setting up fpc-source-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up autotools-dev (20180224.1) ...\n", - "Setting up libpixman-1-dev:amd64 (0.34.0-2) ...\n", - "Setting up libatk1.0-0:amd64 (2.28.1-1) ...\n", - "Setting up libaprutil1:amd64 (1.6.1-2) ...\n", - "Setting up x11proto-randr-dev (2018.4-4) ...\n", - "Setting up libltdl7:amd64 (2.4.6-2) ...\n", - "Setting up libtiffxx5:amd64 (4.0.9-5ubuntu0.3) ...\n", - "Setting up liba52-0.7.4-dev (0.7.4-19) ...\n", - "Setting up libx11-doc (2:1.6.4-3ubuntu0.2) ...\n", - "Setting up libpciaccess0:amd64 (0.14-1) ...\n", - "Setting up x11proto-xf86dga-dev (2018.4-4) ...\n", - "Setting up libmikmod-config (3.3.11.1-3) ...\n", - "Setting up libsensors4:amd64 (1:3.4.0-4) ...\n", - "Setting up libgles2:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up shared-mime-info (1.9-2) ...\n", - "Setting up libxkbcommon0:amd64 (0.8.2-1~ubuntu18.04.1) ...\n", - "Setting up libpcrecpp0v5:amd64 (2:8.39-9) ...\n", - "Setting up libpcre32-3:amd64 (2:8.39-9) ...\n", - "Setting up libvpx5:amd64 (1.7.0-3ubuntu0.18.04.1) ...\n", - "Setting up gdbserver (8.1-0ubuntu3.2) ...\n", - "Setting up icu-devtools (60.2-3ubuntu3.1) ...\n", - "Setting up libpcre16-3:amd64 (2:8.39-9) ...\n", - "Setting up libthai-data (0.1.27-2) ...\n", - "Setting up liblzma-dev:amd64 (5.2.2-1.3) ...\n", - "Setting up libxdmcp6:amd64 (1:1.1.2-3) ...\n", - "Setting up timgm6mb-soundfont (1.3-2) ...\n", - "Setting up libmad0:amd64 (0.15.1b-9ubuntu18.04.1) ...\n", - "Setting up libllvm9:amd64 (1:9-2~ubuntu18.04.2) ...\n", - "Setting up bsdmainutils (11.1.2ubuntu1) ...\n", - "update-alternatives: using /usr/bin/bsd-write to provide /usr/bin/write (write) in auto mode\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/write.1.gz because associated file /usr/share/man/man1/bsd-write.1.gz (of link group write) doesn't exist\n", - "update-alternatives: using /usr/bin/bsd-from to provide 
/usr/bin/from (from) in auto mode\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/from.1.gz because associated file /usr/share/man/man1/bsd-from.1.gz (of link group from) doesn't exist\n", - "Setting up libgmpxx4ldbl:amd64 (2:6.1.2+dfsg-2) ...\n", - "Setting up libdca0:amd64 (0.0.5-10) ...\n", - "Setting up libsamplerate0:amd64 (0.1.9-1) ...\n", - "Setting up libsndio6.1:amd64 (1.1.0-3) ...\n", - "Setting up libvorbis0a:amd64 (1.3.5-4.2) ...\n", - "Setting up x11-common (1:7.7+19ubuntu7.1) ...\n", - "debconf: unable to initialize frontend: Dialog\n", - "debconf: (No usable dialog-like program is installed, so the dialog based frontend cannot be used. at /usr/share/perl5/Debconf/FrontEnd/Dialog.pm line 76.)\n", - "debconf: falling back to frontend: Readline\n", - "update-rc.d: warning: start and stop actions are no longer supported; falling back to defaults\n", - "invoke-rc.d: could not determine current runlevel\n", - "invoke-rc.d: policy-rc.d denied execution of start.\n", - "Setting up libmodplug-dev:amd64 (1:0.8.9.0-1) ...\n", - "Setting up fp-utils-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "update-alternatives: using /usr/lib/x86_64-linux-gnu/fpc/3.0.4 to provide /usr/lib/x86_64-linux-gnu/fpc/default (fp-utils) in auto mode\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/chmcmd.1.gz because associated file /usr/share/man/man1/chmcmd-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/chmls.1.gz because associated file /usr/share/man/man1/chmls-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/ifpc.1.gz because associated file /usr/share/man/man1/ifpc-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fppkg.1.gz because associated file /usr/share/man/man1/fppkg-3.0.4.1.gz (of link group fp-utils) 
doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/instantfpc.1.gz because associated file /usr/share/man/man1/ifpc-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/bin2obj.1.gz because associated file /usr/share/man/man1/bin2obj-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/data2inc.1.gz because associated file /usr/share/man/man1/data2inc-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fprcp.1.gz because associated file /usr/share/man/man1/fprcp-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/h2paspp.1.gz because associated file /usr/share/man/man1/h2paspp-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/makeskel.1.gz because associated file /usr/share/man/man1/makeskel-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/postw32.1.gz because associated file /usr/share/man/man1/postw32-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/relpath.1.gz because associated file /usr/share/man/man1/relpath-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/rmcvsdir.1.gz because associated file /usr/share/man/man1/rmcvsdir-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/unitdiff.1.gz because associated file /usr/share/man/man1/unitdiff-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/delp.1.gz because 
associated file /usr/share/man/man1/delp-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fpcmake.1.gz because associated file /usr/share/man/man1/fpcmake-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fpcsubst.1.gz because associated file /usr/share/man/man1/fpcsubst-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/h2pas.1.gz because associated file /usr/share/man/man1/h2pas-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/plex.1.gz because associated file /usr/share/man/man1/plex-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/ppdep.1.gz because associated file /usr/share/man/man1/ppdep-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/ppudump.1.gz because associated file /usr/share/man/man1/ppudump-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/ppufiles.1.gz because associated file /usr/share/man/man1/ppufiles-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/ppumove.1.gz because associated file /usr/share/man/man1/ppumove-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/ptop.1.gz because associated file /usr/share/man/man1/ptop-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/pyacc.1.gz because associated file /usr/share/man/man1/pyacc-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: 
skip creation of /usr/share/man/man1/rstconv.1.gz because associated file /usr/share/man/man1/rstconv-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fpdoc.1.gz because associated file /usr/share/man/man1/fpdoc-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fpclasschart.1.gz because associated file /usr/share/man/man1/fpclasschart-3.0.4.1.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man5/fpcmake.5.gz because associated file /usr/share/man/man5/fpcmake-3.0.4.5.gz (of link group fp-utils) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man5/ptop.cfg.5.gz because associated file /usr/share/man/man5/ptop-3.0.4.cfg.5.gz (of link group fp-utils) doesn't exist\n", - "Setting up hicolor-icon-theme (0.17-2) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Setting up libglib2.0-bin (2.56.4-0ubuntu0.18.04.6) ...\n", - "Setting up libogg-dev:amd64 (1.3.2-1) ...\n", - "Setting up libslang2:amd64 (2.3.1a-3ubuntu1) ...\n", - "Setting up libslang2-dev:amd64 (2.3.1a-3ubuntu1) ...\n", - "Setting up libglvnd-core-dev:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up libgraphite2-dev:amd64 (1.3.11-2) ...\n", - "Setting up libwayland-cursor0:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Setting up x11proto-input-dev (2018.4-4) ...\n", - "Setting up x11proto-composite-dev (1:2018.4-4) ...\n", - "Setting up libcdt5 (2.40.1-2) ...\n", - "Setting up libwayland-egl1:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Setting up libx11-data (2:1.6.4-3ubuntu0.2) ...\n", - "Setting up libpython2.7-stdlib:amd64 (2.7.17-1~18.04) ...\n", - "Setting up lazarus-doc-1.8 (1.8.2+dfsg-3) ...\n", - "Setting up fp-ide-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "debconf: unable to initialize frontend: Dialog\n", - "debconf: (No usable 
dialog-like program is installed, so the dialog based frontend cannot be used. at /usr/share/perl5/Debconf/FrontEnd/Dialog.pm line 76.)\n", - "debconf: falling back to frontend: Readline\n", - "update-alternatives: using /usr/bin/fp-3.0.4 to provide /usr/bin/fp (fp) in auto mode\n", - "Setting up libxau6:amd64 (1:1.0.8-1) ...\n", - "Setting up autopoint (0.19.8.1-6ubuntu0.3) ...\n", - "Setting up libidn11:amd64 (1.33-2.1ubuntu1.2) ...\n", - "Setting up liblzo2-2:amd64 (2.08-1.2) ...\n", - "Setting up libavahi-common-data:amd64 (0.7-3.1ubuntu1.2) ...\n", - "Setting up libcgraph6 (2.40.1-2) ...\n", - "Setting up fpc-source (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libwayland-server0:amd64 (1.16.0-1ubuntu1.1~18.04.3) ...\n", - "Setting up libwebp6:amd64 (0.6.1-2) ...\n", - "Setting up libfile-stripnondeterminism-perl (0.040-1.1~build1) ...\n", - "Setting up fp-units-base-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libvorbisfile3:amd64 (1.3.5-4.2) ...\n", - "Setting up libgmp-dev:amd64 (2:6.1.2+dfsg-2) ...\n", - "Setting up fp-units-multimedia-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up fp-units-math-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libgts-0.7-5:amd64 (0.7.6+darcs121130-4) ...\n", - "Setting up libmad0-dev (0.15.1b-9ubuntu18.04.1) ...\n", - "Setting up libasound2-dev:amd64 (1.1.3-5ubuntu0.4) ...\n", - "Setting up libdca-dev:amd64 (0.0.5-10) ...\n", - "Setting up libpcre3-dev:amd64 (2:8.39-9) ...\n", - "Setting up gir1.2-atk-1.0:amd64 (2.28.1-1) ...\n", - "Setting up fontconfig-config (2.12.6-0ubuntu2) ...\n", - "Setting up x11proto-core-dev (2018.4-4) ...\n", - "Setting up libltdl-dev:amd64 (2.4.6-2) ...\n", - "Setting up libxshmfence-dev:amd64 (1.3-1) ...\n", - "Setting up libglib2.0-dev-bin (2.56.4-0ubuntu0.18.04.6) ...\n", - "Setting up libdw1:amd64 (0.170-0.4ubuntu0.1) ...\n", - "Setting up gettext (0.19.8.1-6ubuntu0.3) ...\n", - "Setting up fp-units-fv-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up 
fp-compiler-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "debconf: unable to initialize frontend: Dialog\n", - "debconf: (No usable dialog-like program is installed, so the dialog based frontend cannot be used. at /usr/share/perl5/Debconf/FrontEnd/Dialog.pm line 76.)\n", - "debconf: falling back to frontend: Readline\n", - "update-alternatives: using /usr/bin/x86_64-linux-gnu-fpc-3.0.4 to provide /usr/bin/fpc (fpc) in auto mode\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/ppcx64.1.gz because associated file /usr/share/man/man1/ppcx64-3.0.4.1.gz (of link group fpc) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fpc.1.gz because associated file /usr/share/man/man1/fpc-3.0.4.1.gz (of link group fpc) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fpc-depends.1.gz because associated file /usr/share/man/man1/fpc-depends-3.0.4.1.gz (of link group fpc) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/fpcres.1.gz because associated file /usr/share/man/man1/fpcres-3.0.4.1.gz (of link group fpc) doesn't exist\n", - "update-alternatives: using /etc/fpc-3.0.4.cfg to provide /etc/fpc.cfg (fpc.cfg) in auto mode\n", - "update-alternatives: using /usr/bin/fpc to provide /usr/bin/pc (pc) in auto mode\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/pc.1.gz because associated file /usr/share/man/man1/fpc.1.gz (of link group pc) doesn't exist\n", - "Setting up libgvpr2 (2.40.1-2) ...\n", - "Setting up libproxy-tools (0.4.15-1) ...\n", - "Setting up x11proto-fixes-dev (1:2018.4-4) ...\n", - "Setting up libtiff5-dev:amd64 (4.0.9-5ubuntu0.3) ...\n", - "Setting up libflac8:amd64 (1.3.2-1) ...\n", - "Setting up libxml2-utils (2.9.4+dfsg1-6.1ubuntu1.3) ...\n", - "Setting up fp-units-gfx-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up python2.7 (2.7.17-1~18.04) ...\n", - "Setting up fp-units-fcl-3.0.4:amd64 
(3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libharfbuzz0b:amd64 (1.7.2-1ubuntu1) ...\n", - "Setting up x11proto-xf86vidmode-dev (2018.4-4) ...\n", - "Setting up libxau-dev:amd64 (1:1.0.8-1) ...\n", - "Setting up autoconf (2.69-11) ...\n", - "Setting up fp-units-misc-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libthai0:amd64 (0.1.27-2) ...\n", - "Setting up fp-units-net-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up file (1:5.32-2ubuntu0.3) ...\n", - "Setting up fp-units-db-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up libglib2.0-dev:amd64 (2.56.4-0ubuntu0.18.04.6) ...\n", - "Setting up libdrm2:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Setting up libpython-stdlib:amd64 (2.7.15~rc1-1) ...\n", - "Setting up intltool-debian (0.35.0+20060710.4) ...\n", - "Setting up libdrm-intel1:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Setting up libvpx-dev:amd64 (1.7.0-3ubuntu0.18.04.1) ...\n", - "Setting up libxdmcp-dev:amd64 (1:1.1.2-3) ...\n", - "Setting up gir1.2-harfbuzz-0.0:amd64 (1.7.2-1ubuntu1) ...\n", - "Setting up libserf-1-1:amd64 (1.3.9-6) ...\n", - "Setting up libvlccore9:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Setting up lcl-utils-1.8 (1.8.2+dfsg-3) ...\n", - "debconf: unable to initialize frontend: Dialog\n", - "debconf: (No usable dialog-like program is installed, so the dialog based frontend cannot be used. 
at /usr/share/perl5/Debconf/FrontEnd/Dialog.pm line 76.)\n", - "debconf: falling back to frontend: Readline\n", - "update-alternatives: using /usr/lib/lazarus/1.8.2 to provide /usr/lib/lazarus/default (lazarus) in auto mode\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/lazbuild.1.gz because associated file /usr/share/man/man1/lazbuild-1.8.2.1.gz (of link group lazarus) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/lazre.1.gzs because associated file /usr/share/man/man1/lazres-1.8.2.1.gz (of link group lazarus) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/lrstolfm.1.gz because associated file /usr/share/man/man1/lrstolfm-1.8.2.1.gz (of link group lazarus) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/svn2revisioninc.1.gz because associated file /usr/share/man/man1/svn2revisioninc-1.8.2.1.gz (of link group lazarus) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/updatepofiles.1.gz because associated file /usr/share/man/man1/updatepofiles-1.8.2.1.gz (of link group lazarus) doesn't exist\n", - "Setting up automake (1:1.15.1-3ubuntu2) ...\n", - "update-alternatives: using /usr/bin/automake-1.15 to provide /usr/bin/automake (automake) in auto mode\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/automake.1.gz because associated file /usr/share/man/man1/automake-1.15.1.gz (of link group automake) doesn't exist\n", - "update-alternatives: warning: skip creation of /usr/share/man/man1/aclocal.1.gz because associated file /usr/share/man/man1/aclocal-1.15.1.gz (of link group automake) doesn't exist\n", - "Setting up libjack-jackd2-0:amd64 (1.9.12~dfsg-2) ...\n", - "Setting up libice6:amd64 (2:1.0.9-2) ...\n", - "Setting up man-db (2.8.3-2ubuntu0.1) ...\n", - "debconf: unable to initialize frontend: Dialog\n", - "debconf: (No usable dialog-like program is 
installed, so the dialog based frontend cannot be used. at /usr/share/perl5/Debconf/FrontEnd/Dialog.pm line 76.)\n", - "debconf: falling back to frontend: Readline\n", - "Building database of manual pages ...\n", - "Setting up libopenal1:amd64 (1:1.18.2-2) ...\n", - "Setting up libdts-dev:amd64 (0.0.5-10) ...\n", - "Setting up libsvn1:amd64 (1.9.7-4ubuntu1) ...\n", - "Setting up libavahi-common3:amd64 (0.7-3.1ubuntu1.2) ...\n", - "Setting up libdrm-radeon1:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Setting up libgts-bin (0.7.6+darcs121130-4) ...\n", - "Setting up libvorbisenc2:amd64 (1.3.5-4.2) ...\n", - "Setting up libdrm-nouveau2:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Setting up libcaca0:amd64 (0.99.beta19-2ubuntu0.18.04.1) ...\n", - "Setting up libxcb1:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libharfbuzz-gobject0:amd64 (1.7.2-1ubuntu1) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Setting up python (2.7.15~rc1-1) ...\n", - "Setting up libbabeltrace1:amd64 (1.5.5-1) ...\n", - "Setting up libvorbis-dev:amd64 (1.3.5-4.2) ...\n", - "Setting up libtool (2.4.6-2) ...\n", - "Setting up libxcb-present0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libtiff-dev (4.0.9-5ubuntu0.3) ...\n", - "Setting up libfontconfig1:amd64 (2.12.6-0ubuntu2) ...\n", - "Setting up libxcb-dri2-0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libsm6:amd64 (2:1.2.2-1) ...\n", - "Setting up libxcb-dri3-0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxcb-glx0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxcb-randr0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxcb-xfixes0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxcb-render0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libvlc5:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Setting up libharfbuzz-icu0:amd64 (1.7.2-1ubuntu1) ...\n", - "Setting up po-debconf (1.0.20) ...\n", - "Setting up libdrm-amdgpu1:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Setting up lcl-nogui-1.8 
(1.8.2+dfsg-3) ...\n", - "Setting up libdrm-dev:amd64 (2.4.99-1ubuntu1~18.04.2) ...\n", - "Setting up libicu-le-hb0:amd64 (1.0.3+git161113-4) ...\n", - "Setting up libgbm1:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up libx11-6:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - "Setting up libflac-dev:amd64 (1.3.2-1) ...\n", - "Setting up libgl1-mesa-dri:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up libatk1.0-dev:amd64 (2.28.1-1) ...\n", - "Setting up libxcb-sync1:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libsndfile1:amd64 (1.0.28-4ubuntu0.18.04.1) ...\n", - "Setting up libcaca-dev (0.99.beta19-2ubuntu0.18.04.1) ...\n", - "Setting up libvlc-bin:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Setting up libice-dev:amd64 (2:1.0.9-2) ...\n", - "Setting up libiculx60:amd64 (60.2-3ubuntu3.1) ...\n", - "Setting up subversion (1.9.7-4ubuntu1) ...\n", - "Setting up libxcomposite1:amd64 (1:0.4.4-2) ...\n", - "Setting up libxcb-shm0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libvlc-dev:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Setting up libxpm4:amd64 (1:3.5.12-1) ...\n", - "Setting up libxt6:amd64 (1:1.1.5-1) ...\n", - "Setting up libxcb-shape0:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxrender1:amd64 (1:0.9.10-1) ...\n", - "Setting up libxcb1-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxcb-glx0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libavahi-client3:amd64 (0.7-3.1ubuntu1.2) ...\n", - "Setting up libegl-mesa0:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up libx11-dev:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - "Setting up libxft2:amd64 (2.3.2-1) ...\n", - "Setting up gdb (8.1-0ubuntu3.2) ...\n", - "Setting up libxcb-sync-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up fontconfig (2.12.6-0ubuntu2) ...\n", - "Regenerating fonts cache... 
done.\n", - "Setting up libcups2:amd64 (2.2.7-1ubuntu2.7) ...\n", - "Setting up libfontconfig1-dev:amd64 (2.12.6-0ubuntu2) ...\n", - "Setting up libx11-xcb-dev:amd64 (2:1.6.4-3ubuntu0.2) ...\n", - "Setting up libforms2 (1.2.3-1.3) ...\n", - "Setting up libsm-dev:amd64 (2:1.2.2-1) ...\n", - "Setting up libxdamage1:amd64 (1:1.1.4-3) ...\n", - "Setting up mesa-common-dev:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up libxext6:amd64 (2:1.3.3-1) ...\n", - "Setting up libxfixes3:amd64 (1:5.0.3-1) ...\n", - "Setting up libxpm-dev:amd64 (1:3.5.12-1) ...\n", - "Setting up libxss1:amd64 (1:1.2.2-1) ...\n", - "Setting up libgdk-pixbuf2.0-0:amd64 (2.36.11-2) ...\n", - "Setting up libxcb-shm0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libgdk-pixbuf2.0-bin (2.36.11-2) ...\n", - "Setting up libgd3:amd64 (2.2.5-4ubuntu0.4) ...\n", - "Setting up gir1.2-gdkpixbuf-2.0:amd64 (2.36.11-2) ...\n", - "Setting up libxrender-dev:amd64 (1:0.9.10-1) ...\n", - "Setting up libpulse0:amd64 (1:11.1-1ubuntu7.4) ...\n", - "Setting up libxcb-dri2-0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxcb-render0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libegl1:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up libxft-dev (2.3.2-1) ...\n", - "Setting up gtk-update-icon-cache (3.22.30-1ubuntu4) ...\n", - "Setting up libgdk-pixbuf2.0-dev (2.36.11-2) ...\n", - "Setting up libxcb-dri3-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxcursor1:amd64 (1:1.1.15-1) ...\n", - "Setting up libxxf86dga1:amd64 (2:1.1.4-1) ...\n", - "Setting up libxext-dev:amd64 (2:1.3.3-1) ...\n", - "Setting up libpango-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Setting up libwayland-egl1-mesa:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up libxcb-shape0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxxf86dga-dev:amd64 (2:1.1.4-1) ...\n", - "Setting up libxxf86vm1:amd64 (1:1.1.4-1) ...\n", - "Setting up libxxf86vm-dev:amd64 (1:1.1.4-1) ...\n", - "Setting up libxfixes-dev:amd64 
(1:5.0.3-1) ...\n", - "Setting up libfluidsynth1:amd64 (1.1.9-1) ...\n", - "Setting up libxrandr2:amd64 (2:1.5.1-1) ...\n", - "Setting up libglx-mesa0:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up libxi6:amd64 (2:1.7.9-1) ...\n", - "Setting up libcairo2:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Setting up libxinerama1:amd64 (2:1.1.3-1) ...\n", - "Setting up libxcursor-dev:amd64 (1:1.1.15-1) ...\n", - "Setting up libforms-dev (1.2.3-1.3) ...\n", - "Setting up libxcb-randr0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxt-dev:amd64 (1:1.1.5-1) ...\n", - "Setting up libpulse-mainloop-glib0:amd64 (1:11.1-1ubuntu7.4) ...\n", - "Setting up libpulse-dev:amd64 (1:11.1-1ubuntu7.4) ...\n", - "Setting up libxrandr-dev:amd64 (2:1.5.1-1) ...\n", - "Setting up libxcomposite-dev:amd64 (1:0.4.4-2) ...\n", - "Setting up libcairo-script-interpreter2:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Setting up libxcb-xfixes0-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libcairo-gobject2:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Setting up libxdamage-dev:amd64 (1:1.1.4-3) ...\n", - "Setting up libsdl1.2debian:amd64 (1.2.15+dfsg2-0.1ubuntu0.1) ...\n", - "Setting up libpangoft2-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Setting up libgd-dev:amd64 (2.2.5-4ubuntu0.4) ...\n", - "Setting up libsdl2-2.0-0:amd64 (2.0.8+dfsg1-1ubuntu1.18.04.4) ...\n", - "Setting up libxinerama-dev:amd64 (2:1.1.3-1) ...\n", - "Setting up libxcb-present-dev:amd64 (1.13-2~ubuntu18.04) ...\n", - "Setting up libxi-dev:amd64 (2:1.7.9-1) ...\n", - "Setting up libglx0:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up libcairo2-dev:amd64 (1.15.10-2ubuntu0.1) ...\n", - "Setting up libpangoxft-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Setting up libpangocairo-1.0-0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Setting up gir1.2-pango-1.0:amd64 (1.40.14-1ubuntu0.1) ...\n", - "Setting up libmikmod3:amd64 (3.3.11.1-3) ...\n", - "Setting up libgl1:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up libglu1-mesa:amd64 
(9.0.0-2.1build1) ...\n", - "Setting up libgvc6 (2.40.1-2) ...\n", - "Setting up librsvg2-2:amd64 (2.40.20-2) ...\n", - "Setting up libsdl-mixer1.2:amd64 (1.2.12-14) ...\n", - "Setting up libmikmod-dev:amd64 (3.3.11.1-3) ...\n", - "Setting up libglvnd-dev:amd64 (1.0.0-2ubuntu2.3) ...\n", - "Setting up librsvg2-common:amd64 (2.40.20-2) ...\n", - "Setting up libgl1-mesa-dev:amd64 (19.2.8-0ubuntu0~18.04.3) ...\n", - "Setting up libglu1-mesa-dev:amd64 (9.0.0-2.1build1) ...\n", - "Setting up libsdl1.2-dev (1.2.15+dfsg2-0.1ubuntu0.1) ...\n", - "Setting up libsdl-mixer1.2-dev:amd64 (1.2.12-14) ...\n", - "Setting up dh-autoreconf (17) ...\n", - "Setting up libicu-le-hb-dev:amd64 (1.0.3+git161113-4) ...\n", - "Setting up libicu-dev (60.2-3ubuntu3.1) ...\n", - "Setting up libharfbuzz-dev:amd64 (1.7.2-1ubuntu1) ...\n", - "Setting up adwaita-icon-theme (3.28.0-1ubuntu1) ...\n", - "update-alternatives: using /usr/share/icons/Adwaita/cursor.theme to provide /usr/share/icons/default/index.theme (x-cursor-theme) in auto mode\n", - "Setting up debhelper (11.1.6ubuntu2) ...\n", - "Setting up libgtk2.0-0:amd64 (2.24.32-1ubuntu1) ...\n", - "Setting up libgail18:amd64 (2.24.32-1ubuntu1) ...\n", - "Setting up lazarus-ide-1.8 (1.8.2+dfsg-3) ...\n", - "update-alternatives: using /usr/lib/lazarus/1.8.2/startlazarus to provide /usr/bin/lazarus-ide (lazarus-ide) in auto mode\n", - "Setting up libgail-common:amd64 (2.24.32-1ubuntu1) ...\n", - "Setting up libgvc6-plugins-gtk (2.40.1-2) ...\n", - "Setting up humanity-icon-theme (0.6.15) ...\n", - "Setting up libgraphviz-dev (2.40.1-2) ...\n", - "Setting up dh-strip-nondeterminism (0.040-1.1~build1) ...\n", - "Setting up lazarus-ide-gtk2-1.8 (1.8.2+dfsg-3) ...\n", - "update-alternatives: using /usr/lib/lazarus/1.8.2/lazarus-gtk2 to provide /usr/lib/lazarus/1.8.2/lazarus (lazarus-1.8.2) in auto mode\n", - "Setting up libpango1.0-dev (1.40.14-1ubuntu0.1) ...\n", - "Setting up lazarus-ide (1.8.2+dfsg-3) ...\n", - "Setting up gir1.2-gtk-2.0 
(2.24.32-1ubuntu1) ...\n", - "Setting up libgtk2.0-bin (2.24.32-1ubuntu1) ...\n", - "Setting up ubuntu-mono (16.10+18.04.20181005-0ubuntu1) ...\n", - "Setting up libgtk2.0-dev (2.24.32-1ubuntu1) ...\n", - "Setting up fp-units-gtk2-3.0.4:amd64 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up fpc-3.0.4 (3.0.4+dfsg-18ubuntu2) ...\n", - "Setting up lcl-gtk2-1.8 (1.8.2+dfsg-3) ...\n", - "Setting up lcl-units-1.8 (1.8.2+dfsg-3) ...\n", - "Setting up fpc (3.0.4+dfsg-18ubuntu2) ...\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Setting up lcl-1.8 (1.8.2+dfsg-3) ...\n", - "Setting up lazarus-1.8 (1.8.2+dfsg-3) ...\n", - "Setting up lazarus (1.8.2+dfsg-3) ...\n", - "Processing triggers for libc-bin (2.27-3ubuntu1) ...\n", - "Processing triggers for mime-support (3.60ubuntu1) ...\n", - "Processing triggers for libvlc-bin:amd64 (3.0.8-0ubuntu18.04.1) ...\n", - "Processing triggers for libgdk-pixbuf2.0-0:amd64 (2.36.11-2) ...\n" - ] - } - ], - "source": [ - "!apt-get install -y fpc fpc-source lazarus git subversion zip unzip" - ] - }, - { - "cell_type": "code", - "execution_count": 2, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "A mtprocs/examples\n", - "A mtprocs/examples/parallelloop1.lpr\n", - "A mtprocs/examples/parallelloop_nested1.lpi\n", - "A mtprocs/examples/parallelloop_nested1.lpr\n", - "A mtprocs/examples/recursivemtp1.lpr\n", - "A mtprocs/examples/simplemtp1.lpr\n", - "A mtprocs/examples/parallelloop1.lpi\n", - "A mtprocs/examples/recursivemtp1.lpi\n", - "A mtprocs/examples/simplemtp1.lpi\n", - "A mtprocs/examples/testmtp1.lpi\n", - "A mtprocs/examples/testmtp1.lpr\n", - "A mtprocs/Readme.txt\n", - "A mtprocs/mtprocs.pas\n", - "A mtprocs/mtpcpu.pas\n", - "A mtprocs/multithreadprocslaz.lpk\n", - "A mtprocs/mtputils.pas\n", - "A mtprocs/multithreadprocslaz.pas\n", - "Checked out revision 7371.\n" - ] + "cells": [ + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "id": 
"LweSXQHJlq3c" + }, + "outputs": [], + "source": [ + "# This is a simple plant leaf disease classifier inspired from Data from:\n", + "# Identification of Plant Leaf Diseases Using a 9-layer Deep Convolutional Neural Network\n", + "# https://data.mendeley.com/datasets/tywbtsjrjv/1\n", + "# https://www.tensorflow.org/datasets/catalog/plant_village\n", + "\n", + "# This source code required the CAI Neural API found at:\n", + "# https://github.com/joaopauloschuler/neural-api\n", + "\n", + "# To be able to run this code, you'll need at least 32GB of RAM.\n", + "\n", + "has_plant_leaf_disease = True\n", + "has_tiny_imagenet_200 = False" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "id": "2y_lHtCNlq3h", + "outputId": "be45b99a-62a8-4843-a2ac-a0a44fda2d96", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Reading package lists... Done\n", + "Building dependency tree \n", + "Reading state information... 
Done\n", + "zip is already the newest version (3.0-11build1).\n", + "fpc is already the newest version (3.0.4+dfsg-23).\n", + "fpc-source is already the newest version (3.0.4+dfsg-23).\n", + "lazarus is already the newest version (2.0.6+dfsg-3).\n", + "git is already the newest version (1:2.25.1-1ubuntu3.11).\n", + "unzip is already the newest version (6.0-25ubuntu1.1).\n", + "subversion is already the newest version (1.13.0-3ubuntu0.2).\n", + "0 upgraded, 0 newly installed, 0 to remove and 13 not upgraded.\n" + ] + } + ], + "source": [ + "!apt-get install -y fpc fpc-source lazarus git subversion zip unzip" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "id": "rnnYbBVClq3j", + "outputId": "3be591f0-3977-4f60-9307-481b68208d41", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Checked out revision 8879.\n" + ] + } + ], + "source": [ + "!svn checkout https://svn.code.sf.net/p/lazarus-ccr/svn/components/multithreadprocs mtprocs" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "id": "5H6s7WQxlq3j", + "outputId": "6f49a4e2-0fba-47ae-df78-4af743f3f0a5", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Already up to date.\n", + "Processing /content/k\n", + " Preparing metadata (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", + "Requirement already satisfied: pandas>=0.22.0 in /usr/local/lib/python3.10/dist-packages (from cai==0.1.7) (1.5.3)\n", + "Requirement already satisfied: scikit-image>=0.15.0 in /usr/local/lib/python3.10/dist-packages (from cai==0.1.7) (0.19.3)\n", + "Requirement already satisfied: opencv-python>=4.1.2.30 in /usr/local/lib/python3.10/dist-packages (from cai==0.1.7) (4.7.0.72)\n", + "Requirement already satisfied: scikit-learn>=0.21.0 in /usr/local/lib/python3.10/dist-packages (from cai==0.1.7) (1.2.2)\n", + "Requirement already satisfied: numpy in /usr/local/lib/python3.10/dist-packages (from cai==0.1.7) (1.22.4)\n", + "Requirement already satisfied: python-dateutil>=2.8.1 in /usr/local/lib/python3.10/dist-packages (from pandas>=0.22.0->cai==0.1.7) (2.8.2)\n", + "Requirement already satisfied: pytz>=2020.1 in /usr/local/lib/python3.10/dist-packages (from pandas>=0.22.0->cai==0.1.7) (2022.7.1)\n", + "Requirement already satisfied: scipy>=1.4.1 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.15.0->cai==0.1.7) (1.10.1)\n", + "Requirement already satisfied: networkx>=2.2 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.15.0->cai==0.1.7) (3.1)\n", + "Requirement already satisfied: pillow!=7.1.0,!=7.1.1,!=8.3.0,>=6.1.0 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.15.0->cai==0.1.7) (8.4.0)\n", + "Requirement already satisfied: imageio>=2.4.1 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.15.0->cai==0.1.7) (2.25.1)\n", + "Requirement already satisfied: tifffile>=2019.7.26 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.15.0->cai==0.1.7) (2023.7.4)\n", + "Requirement already satisfied: PyWavelets>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.15.0->cai==0.1.7) (1.4.1)\n", + "Requirement already satisfied: packaging>=20.0 in /usr/local/lib/python3.10/dist-packages (from scikit-image>=0.15.0->cai==0.1.7) (23.1)\n", + "Requirement 
already satisfied: joblib>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from scikit-learn>=0.21.0->cai==0.1.7) (1.3.1)\n", + "Requirement already satisfied: threadpoolctl>=2.0.0 in /usr/local/lib/python3.10/dist-packages (from scikit-learn>=0.21.0->cai==0.1.7) (3.1.0)\n", + "Requirement already satisfied: six>=1.5 in /usr/local/lib/python3.10/dist-packages (from python-dateutil>=2.8.1->pandas>=0.22.0->cai==0.1.7) (1.16.0)\n", + "Building wheels for collected packages: cai\n", + " Building wheel for cai (setup.py) ... \u001b[?25l\u001b[?25hdone\n", + " Created wheel for cai: filename=cai-0.1.7-py3-none-any.whl size=61379 sha256=d14cbd88959d2c2ff5a080ba00e2f0a4e66cfda9a46971a10fbc4d2187691fe7\n", + " Stored in directory: /tmp/pip-ephem-wheel-cache-4d_zus1u/wheels/80/61/f5/947bedc7e497038def7d1381fb65d37bd126a80e010114b8f1\n", + "Successfully built cai\n", + "Installing collected packages: cai\n", + " Attempting uninstall: cai\n", + " Found existing installation: cai 0.1.7\n", + " Uninstalling cai-0.1.7:\n", + " Successfully uninstalled cai-0.1.7\n", + "Successfully installed cai-0.1.7\n" + ] + } + ], + "source": [ + "import os\n", + "\n", + "if not os.path.isdir('k'):\n", + " !git clone https://github.com/joaopauloschuler/k-neural-api.git k\n", + "else:\n", + " !cd k && git pull\n", + "\n", + "!cd k && pip install ." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "id": "pjnqf77blq3k", + "outputId": "1318cf4f-db6f-4f15-d61f-0cd97fd085a8", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Checked out revision 1773.\n" + ] + } + ], + "source": [ + "!svn checkout https://svn.code.sf.net/p/cai/svncode/trunk/lazarus neural-api" + ] + }, + { + "cell_type": "code", + "execution_count": 17, + "metadata": { + "id": "hZ-TbJbslq3l", + "outputId": "8d7389a2-c97c-4fab-9cd5-ef826004f438", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Hint: (lazarus) [RunTool] \"/usr/bin/fpc\" \"-iWTOTP\"\n", + "Hint: (lazarus) [RunTool] \"/usr/bin/fpc\" \"-va\" \"compilertest.pas\"\n" + ] + } + ], + "source": [ + "!lazbuild mtprocs/multithreadprocslaz.lpk" + ] + }, + { + "cell_type": "code", + "execution_count": 18, + "metadata": { + "id": "r_8ktGAwlq3m", + "outputId": "b9baa14c-7ca2-4579-c5ea-f391c5dbf89e", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Hint: (lazarus) [RunTool] \"/usr/bin/fpc\" \"-iWTOTP\"\n", + "Hint: (lazarus) [RunTool] \"/usr/bin/fpc\" \"-va\" \"compilertest.pas\"\n", + "Hint: (lazarus) [RunTool] \"/usr/bin/fpc\" \"-iWTOTP\" \"-Px86_64\" \"-Tlinux\"\n", + "Hint: (lazarus) [RunTool] \"/usr/bin/fpc\" \"-va\" \"compilertest.pas\" \"-Px86_64\" \"-Tlinux\"\n", + "Info: (lazarus) Execute Title=\"Compile Project, Mode: Default, Target: /content/neural-api/bin/x86_64-linux/bin/SimplePlantLeafDisease\"\n", + "Info: (lazarus) Working Directory=\"/content/neural-api/examples/SimplePlantLeafDisease/\"\n", + "Info: (lazarus) Executable=\"/usr/bin/fpc\"\n", + "Info: (lazarus) Param[0]=\"-MObjFPC\"\n", + "Info: (lazarus) Param[1]=\"-Scghi\"\n", + "Info: (lazarus) Param[2]=\"-Cg\"\n", + 
"Info: (lazarus) Param[3]=\"-O3\"\n", + "Info: (lazarus) Param[4]=\"-l\"\n", + "Info: (lazarus) Param[5]=\"-vewnhibq\"\n", + "Info: (lazarus) Param[6]=\"-Fi/content/neural-api/neural\"\n", + "Info: (lazarus) Param[7]=\"-Fi/content/neural-api/bin/x86_64-linux/units\"\n", + "Info: (lazarus) Param[8]=\"-Fu/content/neural-api/neural\"\n", + "Info: (lazarus) Param[9]=\"-Fu/usr/lib/lazarus/2.0.6/lcl/units/x86_64-linux\"\n", + "Info: (lazarus) Param[10]=\"-Fu/usr/lib/lazarus/2.0.6/components/lazutils/lib/x86_64-linux\"\n", + "Info: (lazarus) Param[11]=\"-Fu/content/mtprocs/lib/x86_64-linux\"\n", + "Info: (lazarus) Param[12]=\"-Fu/usr/lib/lazarus/2.0.6/packager/units/x86_64-linux\"\n", + "Info: (lazarus) Param[13]=\"-Fu/content/neural-api/examples/SimplePlantLeafDisease/\"\n", + "Info: (lazarus) Param[14]=\"-FU/content/neural-api/bin/x86_64-linux/units/\"\n", + "Info: (lazarus) Param[15]=\"-FE/content/neural-api/bin/x86_64-linux/bin/\"\n", + "Info: (lazarus) Param[16]=\"-o/content/neural-api/bin/x86_64-linux/bin/SimplePlantLeafDisease\"\n", + "Info: (lazarus) Param[17]=\"-dUseCThreads\"\n", + "Info: (lazarus) Param[18]=\"-dAVX\"\n", + "Info: (lazarus) Param[19]=\"-dRelease\"\n", + "Info: (lazarus) Param[20]=\"SimplePlantLeafDisease.pas\"\n", + "Hint: (11030) Start of reading config file /etc/fpc.cfg\n", + "Compiling Release Version\n", + "Hint: (11031) End of reading config file /etc/fpc.cfg\n", + "Free Pascal Compiler version 3.0.4+dfsg-23 [2019/11/25] for x86_64\n", + "Copyright (c) 1993-2017 by Florian Klaempfl and others\n", + "(1002) Target OS: Linux for x86-64\n", + "(3104) Compiling SimplePlantLeafDisease.pas\n", + "/content/neural-api/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.pas(14,60) Hint: (5023) Unit \"math\" not used in SimplePlantLeafDisease\n", + "(9015) Linking /content/neural-api/bin/x86_64-linux/bin/SimplePlantLeafDisease\n", + "/usr/bin/ld.bfd: warning: /content/neural-api/bin/x86_64-linux/bin/link.res contains output sections; did you 
forget -T?\n", + "(1008) 95 lines compiled, 1.1 sec\n", + "(1022) 3 hint(s) issued\n" + ] + } + ], + "source": [ + "!lazbuild neural-api/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.lpi" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "id": "2ws6HVE7lq3o" + }, + "outputs": [], + "source": [ + "import cai.layers\n", + "import cai.datasets\n", + "import cai.models" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "id": "A5CSNeIclq3p" + }, + "outputs": [], + "source": [ + "if (has_tiny_imagenet_200):\n", + " url_zip_file=\"http://cs231n.stanford.edu/tiny-imagenet-200.zip\"\n", + " local_zip_file=\"tiny-imagenet-200.zip\"\n", + " expected_folder_name=\"download-tiny-imagenet-200\"\n", + " Verbose=True\n", + " cai.datasets.download_zip_and_extract(\n", + " url_zip_file=url_zip_file, local_zip_file=local_zip_file,\n", + " expected_folder_name=expected_folder_name, Verbose=Verbose)\n", + " if os.path.isdir('download-tiny-imagenet-200/tiny-imagenet-200'):\n", + " !mv download-tiny-imagenet-200/tiny-imagenet-200 tiny-imagenet-200" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": { + "id": "xwsAMDJPlq3q" + }, + "outputs": [], + "source": [ + "if (has_plant_leaf_disease):\n", + " url_zip_file=\"https://data.mendeley.com/datasets/tywbtsjrjv/1/files/d5652a28-c1d8-4b76-97f3-72fb80f94efc/Plant_leaf_diseases_dataset_without_augmentation.zip?dl=1\"\n", + " local_zip_file=\"plant_leaf.zip\"\n", + " expected_folder_name=\"plant_leaf\"\n", + " Verbose=True\n", + " cai.datasets.download_zip_and_extract(\n", + " url_zip_file=url_zip_file, local_zip_file=local_zip_file,\n", + " expected_folder_name=expected_folder_name, Verbose=Verbose)\n", + " if os.path.isdir('plant_leaf/Plant_leave_diseases_dataset_without_augmentation'):\n", + " !mv plant_leaf/Plant_leave_diseases_dataset_without_augmentation plant" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": 
"P0Gb4PfFlq3r", + "outputId": "5f2a2937-cffa-44e8-ca28-6b6d0c6402b4", + "colab": { + "base_uri": "https://localhost:8080/" + } + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "RUNNING: SimplePlantLeafDisease\n", + "Creating Neural Network...\n", + " Layers: 14\n", + " Neurons:424\n", + " Weights:229058 Sum: -9.365533\n", + "Layer 0 Neurons: 0 Weights: 0 TNNetInput(96,96,3,0,0) Output:96,96,3 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Bias Sum: 0.0000 Branches:1\n", + "Layer 1 Neurons: 64 Weights: 4800 TNNetConvolutionLinear(64,5,4,2,0) Output:50,50,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.8826 Bias Sum: 0.0000 Parent:0 Branches:1\n", + "Layer 2 Neurons: 0 Weights: 0 TNNetMaxPool(2,2,0,0,0) Output:25,25,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Bias Sum: 0.0000 Parent:1 Branches:1\n", + "Layer 3 Neurons: 1 Weights: 2 TNNetMovingStdNormalization(0,0,0,0,0) Output:25,25,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 1.0000 Bias Sum: 0.0000 Parent:2 Branches:1\n", + "Layer 4 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:25,25,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: -1.1006 Bias Sum: 0.0000 Parent:3 Branches:1\n", + "Layer 5 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:25,25,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: -6.1782 Bias Sum: 0.0000 Parent:4 Branches:1\n", + "Layer 6 Neurons: 0 Weights: 0 TNNetMaxPool(2,2,0,0,0) Output:13,13,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Bias Sum: 0.0000 Parent:5 Branches:1\n", + "Layer 7 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:13,13,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.6385 Bias Sum: 0.0000 Parent:6 Branches:1\n", + "Layer 8 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:13,13,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: -7.9876 Bias Sum: 0.0000 Parent:7 Branches:1\n", + "Layer 9 Neurons: 64 Weights: 36864 
TNNetConvolutionReLU(64,3,1,2,0) Output:7,7,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 3.1658 Bias Sum: 0.0000 Parent:8 Branches:1\n", + "Layer 10 Neurons: 0 Weights: 0 TNNetDropout(2,1,0,0,0) Output:7,7,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Bias Sum: 0.0000 Parent:9 Branches:1\n", + "Layer 11 Neurons: 0 Weights: 0 TNNetMaxPool(2,2,0,0,0) Output:4,4,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Bias Sum: 0.0000 Parent:10 Branches:1\n", + "Layer 12 Neurons: 39 Weights: 39936 TNNetFullConnectLinear(39,1,1,0,0) Output:39,1,1 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.2140 Bias Sum: 0.0000 Parent:11 Branches:1\n", + "Layer 13 Neurons: 0 Weights: 0 TNNetSoftMax(0,0,0,0,0) Output:39,1,1 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Bias Sum: 0.0000 Parent:12 Branches:0\n", + "Loading 100% of the Plant leave disease dataset into memory.\n", + "Training Images:49904 Validation Images:2775 Test Images:2775\n", + "File name is: SimplePlantLeafDisease\n", + "Learning rate:0.001000 L2 decay:0.000010 Inertia:0.900000 Batch size:64 Step size:64 Staircase ephocs:10 Min backprop error:0.20\n", + "Training images: 49904\n", + "Validation images: 2775\n", + "Test images: 2775\n", + "Computing...\n", + "640 Examples seen. Accuracy: 0.0175 Error: 1.93886 Loss: 3.54775 Threads: 4 Forward time: 4.94s Backward time: 4.41s Step time: 7.28s\n", + "1280 Examples seen. Accuracy: 0.0305 Error: 1.89895 Loss: 3.45915 Threads: 4 Forward time: 4.24s Backward time: 4.04s Step time: 7.51s\n", + "1920 Examples seen. Accuracy: 0.0424 Error: 1.82177 Loss: 3.06223 Threads: 4 Forward time: 4.78s Backward time: 4.41s Step time: 7.66s\n", + "2560 Examples seen. Accuracy: 0.0562 Error: 1.80944 Loss: 3.07066 Threads: 4 Forward time: 4.22s Backward time: 3.97s Step time: 7.16s\n", + "3200 Examples seen. Accuracy: 0.0674 Error: 1.77911 Loss: 2.94995 Threads: 4 Forward time: 4.23s Backward time: 3.92s Step time: 7.74s\n", + "3840 Examples seen. 
Accuracy: 0.0759 Error: 1.89523 Loss: 3.21194 Threads: 4 Forward time: 4.23s Backward time: 3.99s Step time: 7.09s\n", + "4480 Examples seen. Accuracy: 0.0911 Error: 1.84356 Loss: 2.92096 Threads: 4 Forward time: 4.29s Backward time: 3.99s Step time: 8.08s\n", + "5120 Examples seen. Accuracy: 0.1101 Error: 1.52856 Loss: 2.55728 Threads: 4 Forward time: 4.26s Backward time: 3.74s Step time: 7.12s\n", + "5760 Examples seen. Accuracy: 0.1262 Error: 1.60777 Loss: 2.75767 Threads: 4 Forward time: 4.30s Backward time: 3.83s Step time: 7.90s\n", + "6400 Examples seen. Accuracy: 0.1442 Error: 1.52103 Loss: 2.34901 Threads: 4 Forward time: 4.61s Backward time: 4.11s Step time: 7.03s\n", + "7040 Examples seen. Accuracy: 0.1607 Error: 1.55067 Loss: 2.34624 Threads: 4 Forward time: 4.20s Backward time: 3.78s Step time: 7.61s\n", + "7680 Examples seen. Accuracy: 0.1783 Error: 1.62708 Loss: 2.67945 Threads: 4 Forward time: 5.26s Backward time: 4.57s Step time: 7.36s\n", + "8320 Examples seen. Accuracy: 0.1947 Error: 1.55733 Loss: 2.34232 Threads: 4 Forward time: 4.27s Backward time: 3.74s Step time: 7.20s\n", + "8960 Examples seen. Accuracy: 0.2128 Error: 1.52980 Loss: 2.22569 Threads: 4 Forward time: 5.05s Backward time: 4.53s Step time: 7.69s\n", + "9600 Examples seen. Accuracy: 0.2286 Error: 1.53133 Loss: 2.16799 Threads: 4 Forward time: 4.30s Backward time: 3.94s Step time: 6.93s\n", + "10240 Examples seen. Accuracy: 0.2365 Error: 1.48934 Loss: 2.34372 Threads: 4 Forward time: 4.20s Backward time: 3.66s Step time: 7.76s\n", + "10880 Examples seen. Accuracy: 0.2477 Error: 1.60093 Loss: 2.29594 Threads: 4 Forward time: 4.17s Backward time: 3.54s Step time: 6.84s\n", + "11520 Examples seen. Accuracy: 0.2636 Error: 1.35666 Loss: 1.86014 Threads: 4 Forward time: 4.24s Backward time: 3.71s Step time: 7.45s\n", + "12160 Examples seen. Accuracy: 0.2771 Error: 1.40298 Loss: 1.80918 Threads: 4 Forward time: 4.18s Backward time: 3.44s Step time: 6.72s\n", + "12800 Examples seen. 
Accuracy: 0.2899 Error: 1.47205 Loss: 2.23363 Threads: 4 Forward time: 5.01s Backward time: 4.41s Step time: 7.84s\n", + "13440 Examples seen. Accuracy: 0.2992 Error: 1.54169 Loss: 2.44985 Threads: 4 Forward time: 4.21s Backward time: 3.60s Step time: 7.23s\n" + ] + } + ], + "source": [ + "if os.path.isdir('plant'):\n", + " print(\"RUNNING: SimplePlantLeafDisease\")\n", + " !neural-api/bin/x86_64-linux/bin/SimplePlantLeafDisease" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "F1X-Ad6blq3r" + }, + "outputs": [], + "source": [] } - ], - "source": [ - "!svn checkout https://svn.code.sf.net/p/lazarus-ccr/svn/components/multithreadprocs mtprocs" - ] - }, - { - "cell_type": "code", - "execution_count": 3, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Cloning into 'k'...\n", - "remote: Enumerating objects: 240, done.\u001b[K\n", - "remote: Counting objects: 100% (240/240), done.\u001b[K\n", - "remote: Compressing objects: 100% (169/169), done.\u001b[K\n", - "remote: Total 240 (delta 148), reused 129 (delta 63), pack-reused 0\u001b[K\n", - "Receiving objects: 100% (240/240), 188.31 KiB | 2.73 MiB/s, done.\n", - "Resolving deltas: 100% (148/148), done.\n", - "Processing /tf/k\n", - "Collecting Keras>=2.2.5\n", - " Downloading Keras-2.3.1-py2.py3-none-any.whl (377 kB)\n", - "\u001b[K |████████████████████████████████| 377 kB 2.5 MB/s eta 0:00:01\n", - "\u001b[?25hCollecting pandas>=0.22.0\n", - " Downloading pandas-1.0.3-cp36-cp36m-manylinux1_x86_64.whl (10.0 MB)\n", - "\u001b[K |████████████████████████████████| 10.0 MB 9.5 MB/s eta 0:00:01\n", - "\u001b[?25hCollecting scikit-image>=0.15.0\n", - " Downloading scikit_image-0.16.2-cp36-cp36m-manylinux1_x86_64.whl (26.5 MB)\n", - "\u001b[K |████████████████████████████████| 26.5 MB 20.3 MB/s eta 0:00:01\n", - "\u001b[?25hCollecting opencv-python>=4.1.2.30\n", - " Downloading opencv_python-4.2.0.34-cp36-cp36m-manylinux1_x86_64.whl 
(28.2 MB)\n", - "\u001b[K |████████████████████████████████| 28.2 MB 18.9 MB/s eta 0:00:01 |███▏ | 2.8 MB 18.9 MB/s eta 0:00:02\n", - "\u001b[?25hCollecting scikit-learn>=0.21.0numpy\n", - " Downloading scikit_learn-0.22.2.post1-cp36-cp36m-manylinux1_x86_64.whl (7.1 MB)\n", - "\u001b[K |████████████████████████████████| 7.1 MB 21.6 MB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: numpy>=1.9.1 in /usr/local/lib/python3.6/dist-packages (from Keras>=2.2.5->cai==0.0.7) (1.18.2)\n", - "Collecting pyyaml\n", - " Downloading PyYAML-5.3.1.tar.gz (269 kB)\n", - "\u001b[K |████████████████████████████████| 269 kB 29.9 MB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: scipy>=0.14 in /usr/local/lib/python3.6/dist-packages (from Keras>=2.2.5->cai==0.0.7) (1.4.1)\n", - "Requirement already satisfied: six>=1.9.0 in /usr/local/lib/python3.6/dist-packages (from Keras>=2.2.5->cai==0.0.7) (1.14.0)\n", - "Requirement already satisfied: h5py in /usr/local/lib/python3.6/dist-packages (from Keras>=2.2.5->cai==0.0.7) (2.10.0)\n", - "Requirement already satisfied: keras-preprocessing>=1.0.5 in /usr/local/lib/python3.6/dist-packages (from Keras>=2.2.5->cai==0.0.7) (1.1.0)\n", - "Collecting keras-applications>=1.0.6\n", - " Downloading Keras_Applications-1.0.8-py3-none-any.whl (50 kB)\n", - "\u001b[K |████████████████████████████████| 50 kB 6.0 MB/s eta 0:00:01\n", - "\u001b[?25hCollecting pytz>=2017.2\n", - " Downloading pytz-2019.3-py2.py3-none-any.whl (509 kB)\n", - "\u001b[K |████████████████████████████████| 509 kB 19.7 MB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: python-dateutil>=2.6.1 in /usr/local/lib/python3.6/dist-packages (from pandas>=0.22.0->cai==0.0.7) (2.8.1)\n", - "Collecting pillow>=4.3.0\n", - " Downloading Pillow-7.1.1-cp36-cp36m-manylinux1_x86_64.whl (2.1 MB)\n", - "\u001b[K |████████████████████████████████| 2.1 MB 18.9 MB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: matplotlib!=3.0.0,>=2.0.0 in 
/usr/local/lib/python3.6/dist-packages (from scikit-image>=0.15.0->cai==0.0.7) (3.2.1)\n", - "Collecting PyWavelets>=0.4.0\n", - " Downloading PyWavelets-1.1.1-cp36-cp36m-manylinux1_x86_64.whl (4.4 MB)\n", - "\u001b[K |████████████████████████████████| 4.4 MB 21.5 MB/s eta 0:00:01 |▏ | 20 kB 17.4 MB/s eta 0:00:01\n", - "\u001b[?25hCollecting networkx>=2.0\n", - " Downloading networkx-2.4-py3-none-any.whl (1.6 MB)\n", - "\u001b[K |████████████████████████████████| 1.6 MB 21.3 MB/s eta 0:00:01\n", - "\u001b[?25hCollecting imageio>=2.3.0\n", - " Downloading imageio-2.8.0-py3-none-any.whl (3.3 MB)\n", - "\u001b[K |████████████████████████████████| 3.3 MB 11.3 MB/s eta 0:00:01\n", - "\u001b[?25hCollecting joblib>=0.11\n", - " Downloading joblib-0.14.1-py2.py3-none-any.whl (294 kB)\n", - "\u001b[K |████████████████████████████████| 294 kB 28.3 MB/s eta 0:00:01\n", - "\u001b[?25hRequirement already satisfied: pyparsing!=2.0.4,!=2.1.2,!=2.1.6,>=2.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib!=3.0.0,>=2.0.0->scikit-image>=0.15.0->cai==0.0.7) (2.4.6)\n", - "Requirement already satisfied: kiwisolver>=1.0.1 in /usr/local/lib/python3.6/dist-packages (from matplotlib!=3.0.0,>=2.0.0->scikit-image>=0.15.0->cai==0.0.7) (1.1.0)\n", - "Requirement already satisfied: cycler>=0.10 in /usr/local/lib/python3.6/dist-packages (from matplotlib!=3.0.0,>=2.0.0->scikit-image>=0.15.0->cai==0.0.7) (0.10.0)\n", - "Requirement already satisfied: decorator>=4.3.0 in /usr/local/lib/python3.6/dist-packages (from networkx>=2.0->scikit-image>=0.15.0->cai==0.0.7) (4.4.2)\n", - "Requirement already satisfied: setuptools in /usr/local/lib/python3.6/dist-packages (from kiwisolver>=1.0.1->matplotlib!=3.0.0,>=2.0.0->scikit-image>=0.15.0->cai==0.0.7) (46.0.0)\n", - "Building wheels for collected packages: cai, pyyaml\n", - " Building wheel for cai (setup.py) ... 
\u001b[?25ldone\n", - "\u001b[?25h Created wheel for cai: filename=cai-0.0.7-py3-none-any.whl size=16210 sha256=7fb10e66ba3a06bf428518b1c3d5cd01d63c10beff6e84ceedbff808c1f491e2\n", - " Stored in directory: /tmp/pip-ephem-wheel-cache-32wxu25a/wheels/f0/08/19/56f64e8c8cc45b0390e5e7e2f634c4c1aa0212065044fb6442\n", - " Building wheel for pyyaml (setup.py) ... \u001b[?25ldone\n", - "\u001b[?25h Created wheel for pyyaml: filename=PyYAML-5.3.1-cp36-cp36m-linux_x86_64.whl size=45919 sha256=a0d3189b4eaed85e19ae9d76693a74dd9f833c1af09a63c9c40a3760c45127e7\n", - " Stored in directory: /root/.cache/pip/wheels/e5/9d/ad/2ee53cf262cba1ffd8afe1487eef788ea3f260b7e6232a80fc\n", - "Successfully built cai pyyaml\n", - "Installing collected packages: pyyaml, keras-applications, Keras, pytz, pandas, pillow, PyWavelets, networkx, imageio, scikit-image, opencv-python, joblib, scikit-learn, cai\n", - "Successfully installed Keras-2.3.1 PyWavelets-1.1.1 cai-0.0.7 imageio-2.8.0 joblib-0.14.1 keras-applications-1.0.8 networkx-2.4 opencv-python-4.2.0.34 pandas-1.0.3 pillow-7.1.1 pytz-2019.3 pyyaml-5.3.1 scikit-image-0.16.2 scikit-learn-0.22.2.post1\n" - ] + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.9" + }, + "colab": { + "provenance": [], + "machine_shape": "hm" } - ], - "source": [ - "import os\n", - "\n", - "if not os.path.isdir('k'):\n", - " !git clone https://github.com/joaopauloschuler/k-neural-api.git k\n", - "else:\n", - " !cd k && git pull\n", - "\n", - "!cd k && pip install ." 
- ] - }, - { - "cell_type": "code", - "execution_count": 4, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "A neural-api/neural\n", - "A neural-api/examples\n", - "A neural-api/examples/SuperResolution\n", - "A neural-api/examples/SimplePlantLeafDisease\n", - "A neural-api/examples/SimpleTinyImageNet\n", - "A neural-api/examples/CaiOptimizedDenseNet\n", - "A neural-api/examples/CaiOptimizedDenseNet/results\n", - "A neural-api/examples/SimpleImageClassifier\n", - "A neural-api/examples/SimpleImageClassifier/results\n", - "A neural-api/examples/SimpleTinyImageNet/SimpleTinyImageNet.pas\n", - "A neural-api/examples/SimpleImageClassifier/SimpleImageClassifierResize64.lpr\n", - "A neural-api/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet.lpr\n", - "A neural-api/neural/neuraldatasets.pas\n", - "A neural-api/examples/SimpleFashionMNIST\n", - "A neural-api/examples/SimpleFashionMNIST/results\n", - "A neural-api/examples/VisualGANTinyImagenet\n", - "A neural-api/examples/SimpleImageClassifier/SimpleImageClassifierResize48.lpr\n", - "A neural-api/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet48.lpr\n", - "A neural-api/examples/SimpleImageClassifier/results/SimpleImageClassifier20191012.csv\n", - "A neural-api/neural/neuraldatasetsv.pas\n", - "A neural-api/neural/neuralvolumev.pas\n", - "A neural-api/examples/CaiOptimizedDenseNet/results/CaiOptimizedDenseNet20191018.csv\n", - "A neural-api/examples/SimpleImageClassifier/SimpleImageClassifierResize64.lpi\n", - "A neural-api/examples/SimpleImageClassifier/SimpleImageClassifierResize48.lpi\n", - "A neural-api/examples/SimpleImageClassifier/SimpleImageClassifier.lpi\n", - "A neural-api/examples/SimpleFashionMNIST/results/SimpleFashionMNIST20191018.csv\n", - "A neural-api/examples/GradientAscent\n", - "A neural-api/examples/SimpleImageClassifier/results/SimpleImageClassifier20191102.csv\n", - "A neural-api/examples/SimpleImageClassifier/SimpleImageClassifierCPU.ipynb\n", - 
"A neural-api/examples/SimpleFashionMNIST/SimpleFashionMNIST.lpi\n", - "A neural-api/examples/VisualGANTinyImagenet/uvisualgantinyimagenet.lfm\n", - "A neural-api/examples/VisualGANTinyImagenet/VisualGANTinyImagenet.lpr\n", - "A neural-api/examples/VisualGAN\n", - "A neural-api/neural/neuralvolume.pas\n", - "A neural-api/neural/neuralfit.pas\n", - "A neural-api/examples/GradientAscent/ugradientascent.pas\n", - "A neural-api/examples/GradientAscent/GradientAscent.lpi\n", - "A neural-api/examples/VisualGAN/uvisualgan.lfm\n", - "A neural-api/neural/neuralnetwork.pas\n", - "A neural-api/neural/neuralthread.pas\n", - "A neural-api/examples/ViewInnerPatterns\n", - "A neural-api/neural/neuralopencl.pas\n", - "A neural-api/neural/neuralbit.pas\n", - "A neural-api/neural/neuralnetwork.inc\n", - "A neural-api/examples/VisualGAN/VisualGAN.lpr\n", - "A neural-api/neural/neuralevolutionary.pas\n", - "A neural-api/examples/ImageClassifierSELU\n", - "A neural-api/examples/ImageClassifierSELU/results\n", - "A neural-api/examples/VisualGANTinyImagenet/VisualGANTinyImagenet.ico\n", - "A neural-api/neural/neuralopenclv.pas\n", - "A neural-api/neural/neuralbyteprediction.pas\n", - "A neural-api/neural/neuralab.pas\n", - "A neural-api/examples/DenseNetBCL40\n", - "A neural-api/examples/DenseNetBCL40/results\n", - "A neural-api/examples/VisualGANTinyImagenet/VisualGANTinyImagenet.res\n", - "A neural-api/examples/GradientAscent/GradientAscent.lpr\n", - "A neural-api/neural/neuralgeneric.pas\n", - "A neural-api/examples/ViewInnerPatterns/uviewinnerpatterns.lfm\n", - "A neural-api/examples/GradientAscent/GradientAscent.res\n", - "A neural-api/examples/DenseNetFashionMNIST\n", - "A neural-api/examples/DenseNetFashionMNIST/results\n", - "A neural-api/neural/neuralplanbuilder.pas\n", - "A neural-api/examples/ViewInnerPatterns/ViewInnerPatterns.lpr\n", - "A neural-api/examples/VisualGAN/VisualGAN.lpi\n", - "A neural-api/neural/readme.txt\n", - "A 
neural-api/examples/ImageClassifierSELU/results/ImageClassifierSELU20191102.csv\n", - "A neural-api/examples/Cifar100CaiDenseNet\n", - "A neural-api/examples/Cifar100CaiDenseNet/results\n", - "A neural-api/examples/SimpleMNist\n", - "A neural-api/examples/VisualGAN/VisualGAN.res\n", - "A neural-api/examples/ViewInnerPatterns/ViewInnerPatterns.ico\n", - "A neural-api/examples/ImageClassifierSELU/ImageClassifierSELU.lpi\n", - "A neural-api/neural/neural.cl\n", - "A neural-api/examples/SimpleMNist/results\n", - "A neural-api/examples/IdentityShortcutConnection\n", - "A neural-api/examples/DenseNetBCL40/results/DenseNetBLC40-20191024.csv\n", - "A neural-api/examples/ViewInnerPatterns/ViewInnerPatterns.res\n", - "A neural-api/neural/neuralabfun.pas\n", - "A neural-api/neural/neuralasm.inc\n", - "A neural-api/examples/SeparableConvolution\n", - "A neural-api/examples/SimpleImageClassifierGPU\n", - "A neural-api/examples/ImageClassifierSELU/ImageClassifierSELU.lpr\n", - "A neural-api/examples/DenseNetFashionMNIST/results/DenseNetFashionMNIST-20191025.csv\n", - "A neural-api/neural/neuralcache.pas\n", - "A neural-api/examples/SuperResolution/SuperResolutionTrain.lpi\n", - "A neural-api/examples/DenseNetBCL40/results/DenseNetBLC40-SELU.csv\n", - "A neural-api/examples/DenseNetBCL40/DenseNetBCL40.lpr\n", - "A neural-api/examples/XorAndOr\n", - "A neural-api/experiments\n", - "A neural-api/experiments/IncreaseResolution\n", - "A neural-api/examples/SuperResolution/SuperResolutionTrain.lpr\n", - "A neural-api/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.lpi\n", - "A neural-api/examples/DenseNetFashionMNIST/DenseNetFashionMNIST.lpr\n", - "A neural-api/examples/Cifar100CaiDenseNet/Cifar100CaiDenseNet.lpr\n", - "A neural-api/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.pas\n", - "A neural-api/examples/Cifar100CaiDenseNet/results/Cifar100CaiDenseNet20191020.csv\n", - "A neural-api/examples/SimpleTinyImageNet/SimpleTinyImageNet.lpi\n", - "A 
neural-api/examples/SimpleMNist/SimpleMNist.lpr\n", - "A neural-api/examples/IdentityShortcutConnection/IdentityShortcutConnection.lpi\n", - "A neural-api/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet48.lpi\n", - "A neural-api/examples/SimpleMNist/results/SimpleMNist20191014.csv\n", - "A neural-api/examples/SimpleMNist/results/SimpleMNist20191014.png\n", - "A neural-api/examples/SeparableConvolution/SeparableConvolution.lpi\n", - "A neural-api/examples/IdentityShortcutConnection/IdentityShortcutConnection.ipynb\n", - "A neural-api/examples/IdentityShortcutConnection/IdentityShortcutConnection.pas\n", - "A neural-api/experiments/tinyImageNet200\n", - "A neural-api/examples/XorAndOr/XorAndOr.lpi\n", - "A neural-api/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet.ipynb\n", - "A neural-api/examples/CaiOptimizedDenseNet/CaiOptimizedDenseNet.lpi\n", - "A neural-api/examples/SimpleImageClassifierGPU/SimpleImageClassifierGPU.lpr\n", - "A neural-api/examples/SimpleImageClassifierGPU/SimpleImageClassifierGPU.ipynb\n", - "A neural-api/examples/XorAndOr/XorAndOr.lpr\n", - "A neural-api/experiments/visualCifar10BatchUpdate\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionApp.lpi\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionApp.lpr\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionApp.res\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionLearning.res\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionLearning.lpr\n", - "A neural-api/experiments/supersimple\n", - "A neural-api/experiments/testcnnalgo\n", - "A neural-api/experiments/IncreaseResolution/uincreaseresolutionlearn.lfm\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionLearning.ico\n", - "A neural-api/experiments/IncreaseResolution/uincreaseresolutionapp.lfm\n", - "A neural-api/experiments/IncreaseResolution/uincreaseresolutionlearn.pas\n", - "A 
neural-api/examples/SimpleImageClassifier/SimpleImageClassifierCPUResize48.ipynb\n", - "A neural-api/examples/SimpleImageClassifier/SimpleImageClassifier.lpr\n", - "A neural-api/examples/SimpleImageClassifier/results/SimpleImageClassifier20191111.csv\n", - "A neural-api/examples/SimpleFashionMNIST/SimpleFashionMNIST.lpr\n", - "A neural-api/experiments/visualCifar10AnimalMachine\n", - "A neural-api/examples/VisualGANTinyImagenet/uvisualgantinyimagenet.pas\n", - "A neural-api/experiments/IncreaseResolution/uresizeutil.pas\n", - "A neural-api/experiments/visualCifar10OpenCL\n", - "A neural-api/experiments/tinyImageNet200/tinyImageNet200Update.lpi\n", - "A neural-api/examples/VisualGANTinyImagenet/VisualGANTinyImagenet.lpi\n", - "A neural-api/experiments/tinyImageNet200/utinyimagenet200.pas\n", - "A neural-api/examples/GradientAscent/ugradientascent.lfm\n", - "A neural-api/experiments/visualCifar10BatchUpdate/visualCifar10learningBatchUpdate.lpi\n", - "A neural-api/examples/GradientAscent/GradientAscent.ico\n", - "A neural-api/experiments/visualCifar10learning\n", - "A neural-api/examples/VisualGAN/uvisualgan.pas\n", - "A neural-api/experiments/tinyImageNet200/tinyImageNet200Update.res\n", - "A neural-api/experiments/visualCifar10BatchUpdate/visualCifar10learningBatchUpdate.lpr\n", - "A neural-api/examples/VisualGAN/VisualGAN.ico\n", - "A neural-api/experiments/tinyImageNet200/tinyImageNet200Update.lpr\n", - "A neural-api/experiments/visualCifar10learning2\n", - "A neural-api/examples/ViewInnerPatterns/uviewinnerpatterns.pas\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "A neural-api/examples/ViewInnerPatterns/ViewInnerPatterns.lpi\n", - "A neural-api/examples/ImageClassifierSELU/results/ImageClassifierSELU20191109.csv\n", - "A neural-api/experiments/visualCifar10BatchUpdate/visualCifar10learningBatchUpdate.ico\n", - "A neural-api/experiments/testcnnalgo/testcnnalgo.lpr\n", - "A neural-api/experiments/visualCifar10test\n", - "A 
neural-api/examples/ImageClassifierSELU/ImageClassifierSELU.ipynb\n", - "A neural-api/experiments/visualCifar10AnimalMachine/visualCifar10AnimalMachine.res\n", - "A neural-api/examples/DenseNetBCL40/results/DenseNetBLC40-I0.5-20191028.csv\n", - "A neural-api/experiments/visualCifar10BatchUpdate/uvisualcifar10learningbatchupdate.lfm\n", - "A neural-api/experiments/visualCifar10OpenCL/visualCifar10learningOpenCL.lpi\n", - "A neural-api/examples/DenseNetBCL40/DenseNetBCL40.lpi\n", - "A neural-api/experiments/3dCellularAutomata\n", - "A neural-api/experiments/testcnnalgo/testcnnalgo.lpi\n", - "A neural-api/examples/DenseNetFashionMNIST/DenseNetFashionMNIST.lpi\n", - "A neural-api/experiments/visualCifar10AnimalMachine/visualCifar10AnimalMachine.lpi\n", - "A neural-api/examples/Cifar100CaiDenseNet/Cifar100CaiDenseNet.lpi\n", - "A neural-api/examples/SimpleMNist/SimpleMNist.lpi\n", - "A neural-api/experiments/ConwayGameOfLife\n", - "A neural-api/examples/SeparableConvolution/SeparableConvolution.lpr\n", - "A neural-api/experiments/visualCifar10OpenCL/uvisualcifar10learningopencl.pas\n", - "A neural-api/examples/SimpleImageClassifierGPU/SimpleImageClassifierGPU.lpi\n", - "A neural-api/experiments/visualCifar10BatchUpdate/visualCifar10learningBatchUpdate.res\n", - "A neural-api/experiments/supersimple/supersimple.lpr\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionApp.ico\n", - "A neural-api/experiments/IncreaseResolution/IncreaseResolutionLearning.lpi\n", - "A neural-api/experiments/visualCifar10AnimalMachine/uvisualcifar10animalmachine.pas\n", - "A neural-api/experiments/visualCifar10AnimalMachine/visualCifar10AnimalMachine.lpr\n", - "A neural-api/experiments/IncreaseResolution/uincreaseresolutionapp.pas\n", - "A neural-api/experiments/visualCifar10OpenCL/uvisualcifar10learningopencl.lfm\n", - "A neural-api/experiments/tinyImageNet200/utinyimagenet200.lfm\n", - "A neural-api/experiments/LifeAppearance\n", - "A 
neural-api/experiments/visualCifar10learning/uvisualcifar10learning.pas\n", - "A neural-api/experiments/visualCifar10learning/visualCifar10learning.lpr\n", - "A neural-api/experiments/tinyImageNet200/tinyImageNet200Update.ico\n", - "A neural-api/experiments/visualCifar10learning2/visualCifar10learning2.res\n", - "A neural-api/experiments/visualCifar10OpenCL/visualCifar10learningOpenCL.lpr\n", - "A neural-api/experiments/visualCifar10BatchUpdate/uvisualcifar10learningbatchupdate.pas\n", - "A neural-api/experiments/MagicSquare\n", - "A neural-api/experiments/visualCifar10OpenCL/visualCifar10learningOpenCL.ico\n", - "A neural-api/experiments/visualCifar10learning/visualCifar10learning.lpi\n", - "A neural-api/experiments/visualCifar10learning/visualCifar10learning.res\n", - "A neural-api/experiments/supersimple/supersimple.lpi\n", - "A neural-api/experiments/visualCifar10learning2/uvisualcifar10learning.pas\n", - "A neural-api/experiments/visualCifar10test/uvisualcifar10test.pas\n", - "A neural-api/experiments/visualCifar10AnimalMachine/uvisualcifar10animalmachine.lfm\n", - "A neural-api/experiments/visualCifar10learning2/uvisualcifar10learning.lfm\n", - "A neural-api/experiments/visualCifar10test/visualCifar10test.lpr\n", - "A neural-api/experiments/visualCifar10AnimalMachine/visualCifar10AnimalMachine.ico\n", - "A neural-api/experiments/visualCifar10learning2/visualCifar10learning2.lpr\n", - "A neural-api/experiments/visualCifar10OpenCL/visualCifar10learningOpenCL.res\n", - "A neural-api/experiments/NeuralWebServer\n", - "A neural-api/experiments/3dCellularAutomata/CellularAutomata3d.res\n", - "A neural-api/experiments/visualCifar10learning2/visualCifar10learning2.ico\n", - "A neural-api/experiments/ConwayGameOfLife/JHC_GameOfLife.lpi\n", - "A neural-api/experiments/visualCifar10test/visualCifar10test.lpi\n", - "A neural-api/experiments/visualCifar10learning/uvisualcifar10learning.lfm\n", - "A neural-api/experiments/NineMensMorris\n", - "A 
neural-api/experiments/visualCifar10test/visualCifar10test.res\n", - "A neural-api/experiments/3dCellularAutomata/CellularAutomata3d.ico\n", - "A neural-api/experiments/3dCellularAutomata/CellularAutomata3d.lpi\n", - "A neural-api/experiments/visualCifar10learning/visualCifar10learning.ico\n", - "A neural-api/experiments/visualCifar10learning2/visualCifar10learning2.lpi\n", - "A neural-api/experiments/ConwayGameOfLife/UNIT1.DFM\n", - "A neural-api/experiments/3dCellularAutomata/Unit1.lfm\n", - "A neural-api/experiments/ConwayGameOfLife/JHC_GameOfLife.lpr\n", - "A neural-api/experiments/3dCellularAutomata/Unit1.pas\n", - "A neural-api/experiments/ConwayGameOfLife/lifeai.lfm\n", - "A neural-api/experiments/visualCifar10test/uvisualcifar10test.lfm\n", - "A neural-api/experiments/ConwayGameOfLife/JHC_GameOfLife.res\n", - "A neural-api/experiments/visualCifar10test/visualCifar10test.ico\n", - "A neural-api/experiments/3dCellularAutomata/CellularAutomata3d.lpr\n", - "A neural-api/experiments/ConwayGameOfLife/JHC_GameOfLife.ico\n", - "A neural-api/experiments/ConwayGameOfLife/UNIT1.PAS\n", - "A neural-api/experiments/ConwayGameOfLife/lifeai.pas\n", - "A neural-api/experiments/LifeAppearance/LifeAppearance.res\n", - "A neural-api/experiments/LifeAppearance/LifeAppearance.lpr\n", - "A neural-api/experiments/MagicSquare/MAGIC.ICO\n", - "A neural-api/experiments/ConwayGameOfLife/about.lfm\n", - "A neural-api/experiments/LifeAppearance/LifeAppearance.ico\n", - "A neural-api/experiments/SOM-NeuralNetwork\n", - "A neural-api/experiments/ConwayGameOfLife/LIFE2.ico\n", - "A neural-api/experiments/ConwayGameOfLife/about.pas\n", - "A neural-api/experiments/MagicSquare/MagicSquare.ico\n", - "A neural-api/experiments/MagicSquare/MagicSquare.res\n", - "A neural-api/experiments/mining\n", - "A neural-api/experiments/LifeAppearance/LifeAppearance.lpi\n", - "A neural-api/experiments/MagicSquare/UDARR.PAS\n", - "A neural-api/experiments/LifeAppearance/Unit1.pas\n", - "A 
neural-api/experiments/LifeAppearance/Unit1.lfm\n", - "A neural-api/experiments/MagicSquare/MagicSquare.lpi\n", - "A neural-api/experiments/MagicSquare/MagicSquare.lpr\n", - "A neural-api/experiments/MagicSquare/magic2.ico\n", - "A neural-api/experiments/MagicSquare/quada6g.lfm\n", - "A neural-api/experiments/MagicSquare/uabout.dfm\n", - "A neural-api/experiments/MagicSquare/quada6g.pas\n", - "A neural-api/experiments/NeuralWebServer/README.txt\n", - "A neural-api/experiments/NeuralWebServer/SimpleNeuralWebServer.lpr\n", - "A neural-api/experiments/supersimplecorrelation\n", - "A neural-api/experiments/supersimplehyperbolictangent\n", - "A neural-api/experiments/MagicSquare/uabout.pas\n", - "A neural-api/experiments/NeuralWebServer/SimpleNeuralWebServer.lpi\n", - "A neural-api/experiments/NeuralWebServer/SimpleNeuralWebServer.ico\n", - "A neural-api/experiments/NeuralWebServer/usimpleneuralwebserver.lfm\n", - "A neural-api/experiments/NeuralWebServer/usimpleneuralwebserver.pas\n", - "A neural-api/experiments/visualCifar10MT\n", - "A neural-api/experiments/NineMensMorris/MOINHO.lpr\n", - "A neural-api/experiments/NineMensMorris/MOINHO.ico\n", - "A neural-api/experiments/NineMensMorris/MOINHO.res\n", - "A neural-api/experiments/NineMensMorris/UAUX.pas\n", - "A neural-api/experiments/NineMensMorris/UVence.pas\n", - "A neural-api/experiments/visualCifar10NTL\n", - "A neural-api/experiments/NineMensMorris/TAB1.BMP\n", - "A neural-api/experiments/NineMensMorris/UIA.PAS\n", - "A neural-api/experiments/NineMensMorris/UMOINHO.PAS\n", - "A neural-api/experiments/NineMensMorris/princ.pas\n", - "A neural-api/experiments/NineMensMorris/dialog.lfm\n", - "A neural-api/docs\n", - "A neural-api/libs\n", - "A neural-api/experiments/NineMensMorris/dialog.pas\n", - "A neural-api/experiments/SOM-NeuralNetwork/URSOM.PAS\n", - "A neural-api/experiments/SOM-NeuralNetwork/PSOM1.lpi\n", - "A neural-api/experiments/SOM-NeuralNetwork/PSOM1.lpr\n", - "A 
neural-api/experiments/mining/PMinera.lpi\n", - "A neural-api/experiments/SOM-NeuralNetwork/USOM1.lfm\n", - "A neural-api/experiments/SOM-NeuralNetwork/USOM1.pas\n", - "A neural-api/experiments/mining/UFRob1.pas\n", - "A neural-api/experiments/mining/PMinera.lpr\n", - "A neural-api/experiments/mining/PMinera.res\n", - "A neural-api/experiments/mining/UForOptMin.pas\n", - "A neural-api/experiments/mining/UForOptMin.lfm\n", - "A neural-api/experiments/mining/UVPlan.lfm\n", - "A neural-api/experiments/mining/URobMin2.pas\n", - "A neural-api/experiments/mining/UVPlan.pas\n", - "A neural-api/experiments/supersimplehyperbolictangent/supersimplehyperbolictangent.lpi\n", - "A neural-api/experiments/supersimplehyperbolictangent/supersimplehyperbolictangent.lpr\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "A neural-api/experiments/supersimplecorrelation/supersimplecorrelation.lpi\n", - "A neural-api/experiments/visualCifar10MT/uvisualcifar10learningmt.lfm\n", - "A neural-api/experiments/visualCifar10MT/uvisualcifar10learningmt.pas\n", - "A neural-api/experiments/visualCifar10NTL/uvisualcifar10learningmt.lfm\n", - "A neural-api/experiments/visualCifar10MT/visualCifar10learningMT.res\n", - "A neural-api/experiments/visualCifar10MT/visualCifar10learningMT.ico\n", - "A neural-api/experiments/visualCifar10NTL/uvisualcifar10learningmt.pas\n", - "A neural-api/experiments/visualCifar10NTL/visualCifar10learningMT2.lpr\n", - "A neural-api/experiments/visualCifar10NTL/visualCifar10learningMT2.lpi\n", - "A neural-api/libs/backup\n", - "A neural-api/opencl\n", - "A neural-api/opencl/dot-product-test\n", - "A neural-api/experiments/visualCifar10NTL/visualCifar10learningMT2.res\n", - "A neural-api/LICENSE-EXCEPTION.LGPL\n", - "A neural-api/LICENSE\n", - "A neural-api/readme.txt\n", - "A neural-api/experiments/NeuralWebServer/SimpleNeuralWebServer.exe\n", - "A neural-api/experiments/NeuralWebServer/SimpleNeuralWebServer.res\n", - "A 
neural-api/experiments/NineMensMorris/MOINHO.lpi\n", - "A neural-api/experiments/NineMensMorris/UAUX.lfm\n", - "A neural-api/experiments/NineMensMorris/UVence.lfm\n", - "A neural-api/opencl/easy-trillion-test\n", - "A neural-api/libs/uconvolutionneuralnetwork.pas\n", - "A neural-api/libs/neuralnetwork.inc\n", - "A neural-api/libs/ueasyopencl.pas\n", - "A neural-api/experiments/NineMensMorris/princ.lfm\n", - "A neural-api/experiments/SOM-NeuralNetwork/PSOM1.res\n", - "A neural-api/experiments/mining/PMinera.ico\n", - "A neural-api/experiments/mining/UFRob1.lfm\n", - "A neural-api/experiments/mining/URobMin.pas\n", - "A neural-api/experiments/supersimplecorrelation/supersimplecorrelation.lpr\n", - "A neural-api/experiments/visualCifar10MT/visualCifar10learningMT.lpi\n", - "A neural-api/experiments/visualCifar10MT/visualCifar10learningMT.lpr\n", - "A neural-api/opencl/trillion-test\n", - "A neural-api/libs/uvolume.pas\n", - "A neural-api/experiments/visualCifar10NTL/visualCifar10learningMT2.ico\n", - "A neural-api/thirdpartylibs\n", - "A neural-api/thirdpartylibs/synapse\n", - "A neural-api/docs/cai.png\n", - "A neural-api/libs/readme.txt\n", - "A neural-api/libs/not_in_use_kernels.cl\n", - "A neural-api/libs/uab.pas\n", - "A neural-api/libs/neuralasm.inc\n", - "A neural-api/bin\n", - "A neural-api/bin/x86_64-win64\n", - "A neural-api/bin/x86_64-win64/bin\n", - "A neural-api/libs/ucifar10lcl.pas\n", - "A neural-api/libs/ubit.pas\n", - "A neural-api/libs/uabfun.pas\n", - "A neural-api/libs/udum.pas\n", - "A neural-api/libs/ucifar10.pas\n", - "A neural-api/libs/cai_dot_product.cl\n", - "A neural-api/libs/uplanbuilder.pas\n", - "A neural-api/libs/uvolumelcl.pas\n", - "A neural-api/libs/ubyteprediction.pas\n", - "A neural-api/libs/uevolutionary.pas\n", - "A neural-api/libs/uarraycache.pas\n", - "A neural-api/libs/COPYING.txt\n", - "A neural-api/opencl/dot-product-test/dot_product_test.lpr\n", - "A neural-api/libs/ugeneric.pas\n", - "A neural-api/libs/ueasyopenclcl.pas\n", 
- "A neural-api/libs/ubackpropagation.pas\n", - "A neural-api/opencl/dot-product-test/dot_product_test.res\n", - "A neural-api/opencl/dot-product-test/dot_product_test.lpi\n", - "A neural-api/opencl/dot-product-test/dot_product_test.ico\n", - "A neural-api/opencl/easy-trillion-test/easy_trillion_test.lpi\n", - "A neural-api/opencl/easy-trillion-test/ueasy_trillion_test_form.lfm\n", - "A neural-api/opencl/dot-product-test/dot_product_test_form.pas\n", - "A neural-api/opencl/easy-trillion-test/easy_trillion_test.res\n", - "A neural-api/libs/ntl.pas\n", - "A neural-api/opencl/dot-product-test/dot_product_test_form.lfm\n", - "A neural-api/opencl/easy-trillion-test/easy_trillion_test.lpr\n", - "A neural-api/opencl/easy-trillion-test/ueasy_trillion_test_form.pas\n", - "A neural-api/opencl/trillion-test/opencl_trillion_test.lpi\n", - "A neural-api/opencl/easy-trillion-test/easy_trillion_test.ico\n", - "A neural-api/opencl/trillion-test/opencl_trillion_test.lpr\n", - "A neural-api/opencl/easy-trillion-test/evolve_easy.cl\n", - "A neural-api/opencl/trillion-test/opencl_trillion_test.res\n", - "A neural-api/opencl/trillion-test/uopencl_trillion_test.pas\n", - "A neural-api/opencl/trillion-test/evolve_billion.cl\n", - "A neural-api/opencl/trillion-test/evolve_trillion.cl\n", - "A neural-api/opencl/trillion-test/README.TXT\n", - "A neural-api/opencl/trillion-test/uopencl_trillion_test.lfm\n", - "A neural-api/opencl/trillion-test/Frm_OpenCLTestMain.pas\n", - "A neural-api/opencl/trillion-test/opencl_trillion_test.ico\n", - "A neural-api/bin/readme.txt\n", - "A neural-api/thirdpartylibs/synapse/README.txt\n", - "Checked out revision 1286.\n" - ] - } - ], - "source": [ - "!svn checkout https://svn.code.sf.net/p/cai/svncode/trunk/lazarus neural-api" - ] - }, - { - "cell_type": "code", - "execution_count": 5, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "CopySecondaryConfigFile /etc/lazarus/environmentoptions.xml -> 
/root/.lazarus/environmentoptions.xml\n", - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-iWTOTP\"\n", - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-va\" \"compilertest.pas\"\n", - "Hint: (lazarus) Missing state file of MultiThreadProcsLaz 1.2.1: /tf/mtprocs/lib/x86_64-linux/MultiThreadProcsLaz.compiled\n", - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-iWTOTP\" \"-Px86_64\" \"-Tlinux\"\n", - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-va\" \"-Px86_64\" \"-Tlinux\" \"compilertest.pas\"\n", - "Info: (lazarus) Execute Title=\"Compile package MultiThreadProcsLaz 1.2.1\"\n", - "Info: (lazarus) Working Directory=\"/tf/mtprocs/\"\n", - "Info: (lazarus) Executable=\"/usr/bin/fpc\"\n", - "Info: (lazarus) Param[0]=\"-B\"\n", - "Info: (lazarus) Param[1]=\"-MObjFPC\"\n", - "Info: (lazarus) Param[2]=\"-Scghi\"\n", - "Info: (lazarus) Param[3]=\"-Cg\"\n", - "Info: (lazarus) Param[4]=\"-O1\"\n", - "Info: (lazarus) Param[5]=\"-g\"\n", - "Info: (lazarus) Param[6]=\"-gl\"\n", - "Info: (lazarus) Param[7]=\"-l\"\n", - "Info: (lazarus) Param[8]=\"-vewnhibq\"\n", - "Info: (lazarus) Param[9]=\"-Fu/usr/lib/lazarus/1.8.2/packager/units/x86_64-linux\"\n", - "Info: (lazarus) Param[10]=\"-Fu/tf/mtprocs/\"\n", - "Info: (lazarus) Param[11]=\"-FU/tf/mtprocs/lib/x86_64-linux/\"\n", - "Info: (lazarus) Param[12]=\"multithreadprocslaz.pas\"\n", - "Hint: (11030) Start of reading config file /etc/fpc.cfg\n", - "Hint: (11031) End of reading config file /etc/fpc.cfg\n", - "Free Pascal Compiler version 3.0.4+dfsg-18ubuntu2 [2018/08/29] for x86_64\n", - "Copyright (c) 1993-2017 by Florian Klaempfl and others\n", - "(1002) Target OS: Linux for x86-64\n", - "(3104) Compiling multithreadprocslaz.pas\n", - "(3104) Compiling mtprocs.pas\n", - "(3104) Compiling mtpcpu.pas\n", - "(3104) Compiling mtputils.pas\n", - "/tf/mtprocs/mtputils.pas(40,43) Hint: (5024) Parameter \"Data\" not used\n", - "/tf/mtprocs/multithreadprocslaz.pas(10,10) Hint: (5023) Unit \"MTPUtils\" not used in MultiThreadProcsLaz\n", - 
"/tf/mtprocs/multithreadprocslaz.pas(10,20) Hint: (5023) Unit \"MTPCPU\" not used in MultiThreadProcsLaz\n", - "(1008) 1215 lines compiled, 0.1 sec\n", - "(1022) 5 hint(s) issued\n" - ] - } - ], - "source": [ - "!lazbuild mtprocs/multithreadprocslaz.lpk" - ] - }, - { - "cell_type": "code", - "execution_count": 14, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-iWTOTP\"\n", - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-va\" \"compilertest.pas\"\n", - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-iWTOTP\" \"-Px86_64\" \"-Tlinux\"\n", - "Hint: (lazarus) [RunTool] /usr/bin/fpc \"-va\" \"-Px86_64\" \"-Tlinux\" \"compilertest.pas\"\n", - "Info: (lazarus) Execute Title=\"Compile Project, Mode: Default, Target: /tf/neural-api/bin/x86_64-linux/bin/SimplePlantLeafDisease\"\n", - "Info: (lazarus) Working Directory=\"/tf/neural-api/examples/SimplePlantLeafDisease/\"\n", - "Info: (lazarus) Executable=\"/usr/bin/fpc\"\n", - "Info: (lazarus) Param[0]=\"-MObjFPC\"\n", - "Info: (lazarus) Param[1]=\"-Scghi\"\n", - "Info: (lazarus) Param[2]=\"-Cg\"\n", - "Info: (lazarus) Param[3]=\"-O3\"\n", - "Info: (lazarus) Param[4]=\"-l\"\n", - "Info: (lazarus) Param[5]=\"-vewnhibq\"\n", - "Info: (lazarus) Param[6]=\"-Fi/tf/neural-api/neural\"\n", - "Info: (lazarus) Param[7]=\"-Fi/tf/neural-api/bin/x86_64-linux/units\"\n", - "Info: (lazarus) Param[8]=\"-Fu/tf/neural-api/neural\"\n", - "Info: (lazarus) Param[9]=\"-Fu/usr/lib/lazarus/1.8.2/lcl/units/x86_64-linux\"\n", - "Info: (lazarus) Param[10]=\"-Fu/usr/lib/lazarus/1.8.2/components/lazutils/lib/x86_64-linux\"\n", - "Info: (lazarus) Param[11]=\"-Fu/tf/mtprocs/lib/x86_64-linux\"\n", - "Info: (lazarus) Param[12]=\"-Fu/usr/lib/lazarus/1.8.2/packager/units/x86_64-linux\"\n", - "Info: (lazarus) Param[13]=\"-Fu/tf/neural-api/examples/SimplePlantLeafDisease/\"\n", - "Info: (lazarus) Param[14]=\"-FU/tf/neural-api/bin/x86_64-linux/units/\"\n", - "Info: (lazarus) 
Param[15]=\"-FE/tf/neural-api/bin/x86_64-linux/bin/\"\n", - "Info: (lazarus) Param[16]=\"-dUseCThreads\"\n", - "Info: (lazarus) Param[17]=\"-dAVX\"\n", - "Info: (lazarus) Param[18]=\"-dRelease\"\n", - "Info: (lazarus) Param[19]=\"SimplePlantLeafDisease.pas\"\n", - "Hint: (11030) Start of reading config file /etc/fpc.cfg\n", - "Compiling Release Version\n", - "Hint: (11031) End of reading config file /etc/fpc.cfg\n", - "Free Pascal Compiler version 3.0.4+dfsg-18ubuntu2 [2018/08/29] for x86_64\n", - "/usr/bin/ld.bfd: warning: /tf/neural-api/bin/x86_64-linux/bin/link.res contains output sections; did you forget -T?\n", - "Copyright (c) 1993-2017 by Florian Klaempfl and others\n", - "(1002) Target OS: Linux for x86-64\n", - "(3104) Compiling SimplePlantLeafDisease.pas\n", - "/tf/neural-api/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.pas(14,60) Hint: (5023) Unit \"math\" not used in SimplePlantLeafDisease\n", - "(9015) Linking /tf/neural-api/bin/x86_64-linux/bin/SimplePlantLeafDisease\n", - "(1008) 95 lines compiled, 0.5 sec\n", - "(1022) 3 hint(s) issued\n" - ] - } - ], - "source": [ - "!lazbuild neural-api/examples/SimplePlantLeafDisease/SimplePlantLeafDisease.lpi" - ] - }, - { - "cell_type": "code", - "execution_count": 7, - "metadata": {}, - "outputs": [ - { - "name": "stderr", - "output_type": "stream", - "text": [ - "Using TensorFlow backend.\n" - ] - } - ], - "source": [ - "import cai.layers\n", - "import cai.datasets\n", - "import cai.models" - ] - }, - { - "cell_type": "code", - "execution_count": 8, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Downloading: http://cs231n.stanford.edu/tiny-imagenet-200.zip to tiny-imagenet-200.zip\n", - "Decompressing into: tiny-imagenet-200\n" - ] - } - ], - "source": [ - "if (has_tiny_imagenet_200):\n", - " url_zip_file=\"http://cs231n.stanford.edu/tiny-imagenet-200.zip\"\n", - " local_zip_file=\"tiny-imagenet-200.zip\"\n", - " 
expected_folder_name=\"download-tiny-imagenet-200\"\n", - " Verbose=True\n", - " cai.datasets.download_zip_and_extract(\n", - " url_zip_file=url_zip_file, local_zip_file=local_zip_file, \n", - " expected_folder_name=expected_folder_name, Verbose=Verbose)\n", - " if os.path.isdir('download-tiny-imagenet-200/tiny-imagenet-200'):\n", - " !mv download-tiny-imagenet-200/tiny-imagenet-200 tiny-imagenet-200" - ] - }, - { - "cell_type": "code", - "execution_count": 12, - "metadata": {}, - "outputs": [], - "source": [ - "if (has_plant_leaf_disease):\n", - " url_zip_file=\"https://data.mendeley.com/datasets/tywbtsjrjv/1/files/d5652a28-c1d8-4b76-97f3-72fb80f94efc/Plant_leaf_diseases_dataset_without_augmentation.zip?dl=1\"\n", - " local_zip_file=\"plant_leaf.zip\"\n", - " expected_folder_name=\"plant_leaf\"\n", - " Verbose=True\n", - " cai.datasets.download_zip_and_extract(\n", - " url_zip_file=url_zip_file, local_zip_file=local_zip_file, \n", - " expected_folder_name=expected_folder_name, Verbose=Verbose)\n", - " if os.path.isdir('plant_leaf/Plant_leave_diseases_dataset_without_augmentation'):\n", - " !mv plant_leaf/Plant_leave_diseases_dataset_without_augmentation plant" - ] - }, - { - "cell_type": "code", - "execution_count": 15, - "metadata": {}, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "RUNNING: SimplePlantLeafDisease\n", - "Creating Neural Network...\n", - " Layers: 14\n", - " Neurons:424\n", - " Weights:251522 Sum: 1.066439\n", - "Layer 0 Neurons: 0 Weights: 0 TNNetInput(128,128,3,0,0) Output:128,128,3 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Branches:1\n", - "Layer 1 Neurons: 64 Weights: 4800 TNNetConvolutionLinear(64,5,4,2,0) Output:66,66,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 9.3083 Parent:0 Branches:1\n", - "Layer 2 Neurons: 0 Weights: 0 TNNetMaxPool(2,2,0,0,0) Output:33,33,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Parent:1 Branches:1\n", - "Layer 3 Neurons: 1 Weights: 2 
TNNetMovingStdNormalization(0,0,0,0,0) Output:33,33,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 1.0000 Parent:2 Branches:1\n", - "Layer 4 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:33,33,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum:-16.7340 Parent:3 Branches:1\n", - "Layer 5 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:33,33,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: -2.0621 Parent:4 Branches:1\n", - "Layer 6 Neurons: 0 Weights: 0 TNNetMaxPool(2,2,0,0,0) Output:17,17,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Parent:5 Branches:1\n", - "Layer 7 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:17,17,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: -3.9453 Parent:6 Branches:1\n", - "Layer 8 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,1,0) Output:17,17,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 3.3115 Parent:7 Branches:1\n", - "Layer 9 Neurons: 64 Weights: 36864 TNNetConvolutionReLU(64,3,1,2,0) Output:9,9,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 2.7569 Parent:8 Branches:1\n", - "Layer 10 Neurons: 0 Weights: 0 TNNetDropout(2,1,0,0,0) Output:9,9,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Parent:9 Branches:1\n", - "Layer 11 Neurons: 0 Weights: 0 TNNetMaxPool(2,2,0,0,0) Output:5,5,64 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Parent:10 Branches:1\n", - "Layer 12 Neurons: 39 Weights: 62400 TNNetFullConnectLinear(39,1,1,0,0) Output:39,1,1 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 7.4312 Parent:11 Branches:1\n", - "Layer 13 Neurons: 0 Weights: 0 TNNetSoftMax(0,0,0,0,0) Output:39,1,1 Learning Rate:0.0100 Inertia:0.90 Weight Sum: 0.0000 Parent:12 Branches:0\n", - "Loading 100% of the Plant leave disease dataset into memory.\n", - "Training Images:49904 Validation Images:2775 Test Images:2775\n", - "File name is: SimplePlantLeafDisease\n", - "Learning rate:0.001000 L2 decay:0.000010 Inertia:0.900000 Batch size:64 Step size:64 Staircase 
ephocs:10\n", - "Training images:49904\n", - "Validation images:2775\n", - "Test images:2775\n", - "Computing...\n", - "640 Examples seen. Accuracy:0.0329 Error: 1.93573 Loss:3.52334 Threads: 8 Forward time: 4.93s Backward time: 4.51s Step time: 4.20s\n", - "1280 Examples seen. Accuracy:0.0402 Error: 1.92410 Loss:3.40404 Threads: 8 Forward time: 5.06s Backward time: 4.55s Step time: 4.41s\n", - "1920 Examples seen. Accuracy:0.0502 Error: 1.90493 Loss:3.37106 Threads: 8 Forward time: 4.96s Backward time: 4.62s Step time: 4.76s\n", - "2560 Examples seen. Accuracy:0.0650 Error: 1.79781 Loss:3.00073 Threads: 8 Forward time: 4.89s Backward time: 4.57s Step time: 4.46s\n", - "3200 Examples seen. Accuracy:0.0769 Error: 1.81143 Loss:3.05211 Threads: 8 Forward time: 5.20s Backward time: 4.65s Step time: 4.43s\n", - "3840 Examples seen. Accuracy:0.0909 Error: 1.82340 Loss:2.95677 Threads: 8 Forward time: 5.02s Backward time: 4.55s Step time: 4.44s\n", - "4480 Examples seen. Accuracy:0.1032 Error: 1.73382 Loss:2.70320 Threads: 8 Forward time: 5.02s Backward time: 4.50s Step time: 4.45s\n", - "5120 Examples seen. Accuracy:0.1184 Error: 1.74171 Loss:2.65646 Threads: 8 Forward time: 5.09s Backward time: 4.61s Step time: 4.43s\n", - "5760 Examples seen. Accuracy:0.1323 Error: 1.61957 Loss:2.76495 Threads: 8 Forward time: 5.06s Backward time: 4.54s Step time: 4.47s\n", - "6400 Examples seen. Accuracy:0.1503 Error: 1.41402 Loss:2.30839 Threads: 8 Forward time: 4.99s Backward time: 4.49s Step time: 4.43s\n", - "7040 Examples seen. Accuracy:0.1664 Error: 1.63357 Loss:2.37070 Threads: 8 Forward time: 5.08s Backward time: 4.62s Step time: 4.50s\n", - "7680 Examples seen. Accuracy:0.1841 Error: 1.53345 Loss:2.28641 Threads: 8 Forward time: 4.99s Backward time: 4.60s Step time: 4.49s\n", - "8320 Examples seen. Accuracy:0.2068 Error: 1.42229 Loss:1.94608 Threads: 8 Forward time: 5.11s Backward time: 4.65s Step time: 4.46s\n", - "8960 Examples seen. 
Accuracy:0.2221 Error: 1.48743 Loss:2.20207 Threads: 8 Forward time: 5.25s Backward time: 4.77s Step time: 4.46s\n", - "9600 Examples seen. Accuracy:0.2384 Error: 1.44126 Loss:2.10981 Threads: 8 Forward time: 4.90s Backward time: 4.53s Step time: 4.33s\n", - "10240 Examples seen. Accuracy:0.2583 Error: 1.36298 Loss:1.69164 Threads: 8 Forward time: 4.92s Backward time: 4.49s Step time: 4.37s\n", - "10880 Examples seen. Accuracy:0.2738 Error: 1.53760 Loss:2.12548 Threads: 8 Forward time: 5.30s Backward time: 4.63s Step time: 4.36s\n", - "11520 Examples seen. Accuracy:0.2911 Error: 1.45308 Loss:2.05101 Threads: 8 Forward time: 4.83s Backward time: 4.39s Step time: 4.31s\n", - "12160 Examples seen. Accuracy:0.3092 Error: 1.31594 Loss:1.87571 Threads: 8 Forward time: 4.90s Backward time: 4.46s Step time: 4.32s\n", - "12800 Examples seen. Accuracy:0.3235 Error: 1.25067 Loss:1.94517 Threads: 8 Forward time: 4.92s Backward time: 4.40s Step time: 4.24s\n", - "13440 Examples seen. Accuracy:0.3400 Error: 1.27565 Loss:1.63616 Threads: 8 Forward time: 4.97s Backward time: 4.34s Step time: 4.26s\n", - "14080 Examples seen. Accuracy:0.3532 Error: 1.40328 Loss:2.05918 Threads: 8 Forward time: 4.95s Backward time: 4.35s Step time: 4.34s\n", - "14720 Examples seen. Accuracy:0.3659 Error: 1.44345 Loss:2.47078 Threads: 8 Forward time: 5.17s Backward time: 4.60s Step time: 4.31s\n", - "15360 Examples seen. Accuracy:0.3776 Error: 1.12124 Loss:1.53043 Threads: 8 Forward time: 5.07s Backward time: 4.36s Step time: 4.27s\n", - "16000 Examples seen. Accuracy:0.3907 Error: 1.24098 Loss:1.93580 Threads: 8 Forward time: 5.10s Backward time: 4.49s Step time: 4.25s\n", - "16640 Examples seen. Accuracy:0.4039 Error: 1.15337 Loss:1.69533 Threads: 8 Forward time: 5.27s Backward time: 4.52s Step time: 4.24s\n", - "17280 Examples seen. Accuracy:0.4182 Error: 1.20605 Loss:1.58746 Threads: 8 Forward time: 4.83s Backward time: 4.31s Step time: 4.22s\n", - "17920 Examples seen. 
Accuracy:0.4258 Error: 1.19611 Loss:1.67349 Threads: 8 Forward time: 4.92s Backward time: 4.35s Step time: 4.26s\n", - "18560 Examples seen. Accuracy:0.4384 Error: 1.17074 Loss:1.46777 Threads: 8 Forward time: 4.87s Backward time: 4.36s Step time: 4.23s\n", - "19200 Examples seen. Accuracy:0.4506 Error: 1.23526 Loss:1.92160 Threads: 8 Forward time: 5.06s Backward time: 4.38s Step time: 4.27s\n", - "19840 Examples seen. Accuracy:0.4551 Error: 1.22965 Loss:1.48649 Threads: 8 Forward time: 5.01s Backward time: 4.38s Step time: 4.32s\n", - "20480 Examples seen. Accuracy:0.4627 Error: 1.16185 Loss:1.66194 Threads: 8 Forward time: 4.88s Backward time: 4.36s Step time: 4.29s\n", - "21120 Examples seen. Accuracy:0.4658 Error: 1.23110 Loss:1.74371 Threads: 8 Forward time: 4.97s Backward time: 4.40s Step time: 4.28s\n", - "21760 Examples seen. Accuracy:0.4719 Error: 1.24056 Loss:1.99473 Threads: 8 Forward time: 4.95s Backward time: 4.33s Step time: 4.28s\n", - "22400 Examples seen. Accuracy:0.4806 Error: 0.96615 Loss:1.39614 Threads: 8 Forward time: 5.07s Backward time: 4.40s Step time: 4.25s\n", - "23040 Examples seen. Accuracy:0.4913 Error: 1.12965 Loss:1.54893 Threads: 8 Forward time: 5.08s Backward time: 4.36s Step time: 4.37s\n", - "23680 Examples seen. Accuracy:0.5023 Error: 1.11976 Loss:1.58815 Threads: 8 Forward time: 5.00s Backward time: 4.35s Step time: 4.30s\n", - "24320 Examples seen. Accuracy:0.5051 Error: 1.11871 Loss:1.45467 Threads: 8 Forward time: 5.22s Backward time: 4.59s Step time: 4.33s\n", - "24960 Examples seen. Accuracy:0.5128 Error: 1.01548 Loss:1.38496 Threads: 8 Forward time: 4.99s Backward time: 4.36s Step time: 4.34s\n", - "25600 Examples seen. Accuracy:0.5208 Error: 0.99291 Loss:1.30965 Threads: 8 Forward time: 5.02s Backward time: 4.41s Step time: 4.34s\n", - "26240 Examples seen. 
Accuracy:0.5313 Error: 0.99596 Loss:1.26464 Threads: 8 Forward time: 4.90s Backward time: 4.40s Step time: 4.74s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "26880 Examples seen. Accuracy:0.5353 Error: 1.00507 Loss:1.27191 Threads: 8 Forward time: 5.30s Backward time: 4.64s Step time: 4.42s\n", - "27520 Examples seen. Accuracy:0.5427 Error: 1.00052 Loss:1.25032 Threads: 8 Forward time: 4.95s Backward time: 4.36s Step time: 4.24s\n", - "28160 Examples seen. Accuracy:0.5459 Error: 1.08495 Loss:1.37669 Threads: 8 Forward time: 4.93s Backward time: 4.30s Step time: 4.33s\n", - "28800 Examples seen. Accuracy:0.5517 Error: 1.01282 Loss:1.20652 Threads: 8 Forward time: 4.98s Backward time: 4.29s Step time: 4.29s\n", - "29440 Examples seen. Accuracy:0.5557 Error: 1.01782 Loss:1.64236 Threads: 8 Forward time: 4.97s Backward time: 4.28s Step time: 4.27s\n", - "30080 Examples seen. Accuracy:0.5602 Error: 1.01594 Loss:1.34733 Threads: 8 Forward time: 5.09s Backward time: 4.30s Step time: 4.47s\n", - "30720 Examples seen. Accuracy:0.5625 Error: 0.96419 Loss:1.50042 Threads: 8 Forward time: 4.91s Backward time: 4.23s Step time: 4.23s\n", - "31360 Examples seen. Accuracy:0.5655 Error: 0.97419 Loss:1.09295 Threads: 8 Forward time: 4.95s Backward time: 4.25s Step time: 4.23s\n", - "32000 Examples seen. Accuracy:0.5690 Error: 1.03017 Loss:1.38354 Threads: 8 Forward time: 4.87s Backward time: 4.25s Step time: 4.21s\n", - "32640 Examples seen. Accuracy:0.5699 Error: 1.10302 Loss:1.21681 Threads: 8 Forward time: 4.93s Backward time: 4.25s Step time: 4.19s\n", - "33280 Examples seen. Accuracy:0.5746 Error: 0.91844 Loss:1.02189 Threads: 8 Forward time: 4.93s Backward time: 4.29s Step time: 4.23s\n", - "33920 Examples seen. Accuracy:0.5749 Error: 1.07339 Loss:1.34992 Threads: 8 Forward time: 4.99s Backward time: 4.31s Step time: 4.23s\n", - "34560 Examples seen. 
Accuracy:0.5769 Error: 0.83645 Loss:0.93622 Threads: 8 Forward time: 4.94s Backward time: 4.24s Step time: 4.21s\n", - "35200 Examples seen. Accuracy:0.5834 Error: 0.90330 Loss:1.05480 Threads: 8 Forward time: 4.98s Backward time: 4.23s Step time: 4.23s\n", - "35840 Examples seen. Accuracy:0.5869 Error: 0.93318 Loss:1.45615 Threads: 8 Forward time: 4.97s Backward time: 4.18s Step time: 4.08s\n", - "36480 Examples seen. Accuracy:0.5874 Error: 0.91055 Loss:1.14300 Threads: 8 Forward time: 5.00s Backward time: 4.19s Step time: 4.12s\n", - "37120 Examples seen. Accuracy:0.5877 Error: 0.88337 Loss:1.03457 Threads: 8 Forward time: 4.95s Backward time: 4.19s Step time: 4.12s\n", - "37760 Examples seen. Accuracy:0.5890 Error: 0.88038 Loss:1.11912 Threads: 8 Forward time: 4.95s Backward time: 4.18s Step time: 4.11s\n", - "38400 Examples seen. Accuracy:0.5929 Error: 0.88309 Loss:1.03070 Threads: 8 Forward time: 4.85s Backward time: 4.17s Step time: 4.17s\n", - "39040 Examples seen. Accuracy:0.6014 Error: 0.81315 Loss:0.87636 Threads: 8 Forward time: 4.90s Backward time: 4.18s Step time: 4.13s\n", - "39680 Examples seen. Accuracy:0.6080 Error: 0.97946 Loss:1.21024 Threads: 8 Forward time: 4.88s Backward time: 4.26s Step time: 4.16s\n", - "40320 Examples seen. Accuracy:0.6141 Error: 0.68760 Loss:1.04425 Threads: 8 Forward time: 4.86s Backward time: 4.19s Step time: 4.23s\n", - "40960 Examples seen. Accuracy:0.6174 Error: 1.00662 Loss:1.25669 Threads: 8 Forward time: 4.91s Backward time: 4.23s Step time: 4.13s\n", - "41600 Examples seen. Accuracy:0.6202 Error: 0.92953 Loss:1.22395 Threads: 8 Forward time: 4.84s Backward time: 4.19s Step time: 4.11s\n", - "42240 Examples seen. Accuracy:0.6241 Error: 1.03482 Loss:1.35462 Threads: 8 Forward time: 4.92s Backward time: 4.19s Step time: 4.12s\n", - "42880 Examples seen. Accuracy:0.6260 Error: 0.93275 Loss:1.05832 Threads: 8 Forward time: 4.82s Backward time: 4.14s Step time: 4.09s\n", - "43520 Examples seen. 
Accuracy:0.6306 Error: 0.85488 Loss:1.03400 Threads: 8 Forward time: 4.94s Backward time: 4.26s Step time: 4.14s\n", - "44160 Examples seen. Accuracy:0.6354 Error: 0.89946 Loss:1.16312 Threads: 8 Forward time: 4.91s Backward time: 4.21s Step time: 4.14s\n", - "44800 Examples seen. Accuracy:0.6343 Error: 1.02987 Loss:1.36401 Threads: 8 Forward time: 4.91s Backward time: 4.16s Step time: 4.16s\n", - "45440 Examples seen. Accuracy:0.6371 Error: 0.87058 Loss:0.98873 Threads: 8 Forward time: 4.91s Backward time: 4.15s Step time: 4.19s\n", - "46080 Examples seen. Accuracy:0.6375 Error: 1.08666 Loss:1.32725 Threads: 8 Forward time: 4.90s Backward time: 4.23s Step time: 4.12s\n", - "46720 Examples seen. Accuracy:0.6396 Error: 0.96513 Loss:1.08828 Threads: 8 Forward time: 4.89s Backward time: 4.16s Step time: 4.13s\n", - "47360 Examples seen. Accuracy:0.6416 Error: 0.97114 Loss:1.29618 Threads: 8 Forward time: 4.87s Backward time: 4.16s Step time: 4.14s\n", - "48000 Examples seen. Accuracy:0.6439 Error: 0.87264 Loss:1.09346 Threads: 8 Forward time: 5.05s Backward time: 4.17s Step time: 4.12s\n", - "48640 Examples seen. Accuracy:0.6484 Error: 0.94305 Loss:1.14566 Threads: 8 Forward time: 4.94s Backward time: 4.22s Step time: 4.16s\n", - "49280 Examples seen. Accuracy:0.6503 Error: 0.94371 Loss:1.15029 Threads: 8 Forward time: 4.93s Backward time: 4.11s Step time: 4.22s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 1 Examples seen:49904 Validation Accuracy: 0.7630 Validation Error: 0.6617 Validation Loss: 0.7862 Total time: 6.10min\n", - "Epoch time: 5.5 minutes. 100 epochs: 9.1 hours.\n", - "Epochs: 1. Working time: 0.1 hours.\n", - "50544 Examples seen. Accuracy:0.6560 Error: 0.75665 Loss:1.07503 Threads: 8 Forward time: 4.83s Backward time: 4.18s Step time: 4.18s\n", - "51184 Examples seen. 
Accuracy:0.6633 Error: 0.71766 Loss:0.91668 Threads: 8 Forward time: 4.96s Backward time: 4.19s Step time: 4.15s\n", - "51824 Examples seen. Accuracy:0.6619 Error: 0.98168 Loss:1.31640 Threads: 8 Forward time: 4.88s Backward time: 4.13s Step time: 4.14s\n", - "52464 Examples seen. Accuracy:0.6665 Error: 0.69870 Loss:0.84428 Threads: 8 Forward time: 4.91s Backward time: 4.15s Step time: 4.20s\n", - "53104 Examples seen. Accuracy:0.6701 Error: 0.73452 Loss:1.04197 Threads: 8 Forward time: 4.92s Backward time: 4.22s Step time: 4.20s\n", - "53744 Examples seen. Accuracy:0.6740 Error: 0.73798 Loss:0.91838 Threads: 8 Forward time: 4.91s Backward time: 4.08s Step time: 4.23s\n", - "54384 Examples seen. Accuracy:0.6785 Error: 0.80641 Loss:0.95520 Threads: 8 Forward time: 5.00s Backward time: 4.16s Step time: 4.28s\n", - "55024 Examples seen. Accuracy:0.6825 Error: 0.67690 Loss:0.79173 Threads: 8 Forward time: 4.90s Backward time: 4.11s Step time: 4.11s\n", - "55664 Examples seen. Accuracy:0.6826 Error: 0.73889 Loss:1.13018 Threads: 8 Forward time: 4.90s Backward time: 4.18s Step time: 4.08s\n", - "56304 Examples seen. Accuracy:0.6812 Error: 0.95271 Loss:1.17909 Threads: 8 Forward time: 4.96s Backward time: 4.19s Step time: 4.15s\n", - "56944 Examples seen. Accuracy:0.6839 Error: 0.78306 Loss:1.23661 Threads: 8 Forward time: 5.08s Backward time: 4.19s Step time: 4.19s\n", - "57584 Examples seen. Accuracy:0.6876 Error: 0.62120 Loss:0.71262 Threads: 8 Forward time: 5.48s Backward time: 4.70s Step time: 4.16s\n", - "58224 Examples seen. Accuracy:0.6914 Error: 0.63904 Loss:0.67689 Threads: 8 Forward time: 5.16s Backward time: 4.27s Step time: 4.11s\n", - "58864 Examples seen. Accuracy:0.6939 Error: 0.83921 Loss:1.03057 Threads: 8 Forward time: 4.87s Backward time: 4.11s Step time: 4.15s\n", - "59504 Examples seen. Accuracy:0.6940 Error: 0.95884 Loss:1.17702 Threads: 8 Forward time: 4.99s Backward time: 4.15s Step time: 4.20s\n", - "60144 Examples seen. 
Accuracy:0.6965 Error: 0.69701 Loss:0.81238 Threads: 8 Forward time: 5.01s Backward time: 4.24s Step time: 4.27s\n", - "60784 Examples seen. Accuracy:0.6980 Error: 0.75253 Loss:1.25958 Threads: 8 Forward time: 5.18s Backward time: 4.37s Step time: 4.10s\n", - "61424 Examples seen. Accuracy:0.6960 Error: 0.98763 Loss:1.45467 Threads: 8 Forward time: 4.91s Backward time: 4.10s Step time: 4.83s\n", - "62064 Examples seen. Accuracy:0.6966 Error: 0.76255 Loss:0.83836 Threads: 8 Forward time: 4.93s Backward time: 4.05s Step time: 4.03s\n", - "62704 Examples seen. Accuracy:0.6959 Error: 0.89303 Loss:1.09438 Threads: 8 Forward time: 4.95s Backward time: 4.10s Step time: 4.09s\n", - "63344 Examples seen. Accuracy:0.6963 Error: 0.77112 Loss:0.97534 Threads: 8 Forward time: 4.92s Backward time: 4.13s Step time: 4.09s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "63984 Examples seen. Accuracy:0.7006 Error: 0.61098 Loss:0.79566 Threads: 8 Forward time: 4.93s Backward time: 4.08s Step time: 4.11s\n", - "64624 Examples seen. Accuracy:0.7029 Error: 0.65674 Loss:0.68639 Threads: 8 Forward time: 4.96s Backward time: 4.09s Step time: 4.09s\n", - "65264 Examples seen. Accuracy:0.7006 Error: 0.92591 Loss:1.08386 Threads: 8 Forward time: 5.04s Backward time: 4.11s Step time: 4.27s\n", - "65904 Examples seen. Accuracy:0.7002 Error: 0.88518 Loss:1.12173 Threads: 8 Forward time: 4.96s Backward time: 4.10s Step time: 4.18s\n", - "66544 Examples seen. Accuracy:0.7054 Error: 0.73318 Loss:0.93317 Threads: 8 Forward time: 4.93s Backward time: 4.10s Step time: 4.17s\n", - "67184 Examples seen. Accuracy:0.7081 Error: 0.73602 Loss:0.96279 Threads: 8 Forward time: 4.90s Backward time: 4.14s Step time: 4.19s\n", - "67824 Examples seen. Accuracy:0.7077 Error: 0.82680 Loss:0.98536 Threads: 8 Forward time: 5.00s Backward time: 4.10s Step time: 4.16s\n", - "68464 Examples seen. 
Accuracy:0.7112 Error: 0.67516 Loss:0.70925 Threads: 8 Forward time: 5.02s Backward time: 4.09s Step time: 4.08s\n", - "69104 Examples seen. Accuracy:0.7110 Error: 0.68307 Loss:0.71865 Threads: 8 Forward time: 4.95s Backward time: 4.05s Step time: 4.09s\n", - "69744 Examples seen. Accuracy:0.7104 Error: 0.76397 Loss:1.00817 Threads: 8 Forward time: 4.96s Backward time: 4.09s Step time: 4.16s\n", - "70384 Examples seen. Accuracy:0.7116 Error: 0.73266 Loss:0.90177 Threads: 8 Forward time: 4.94s Backward time: 4.06s Step time: 4.19s\n", - "71024 Examples seen. Accuracy:0.7127 Error: 0.74932 Loss:0.87967 Threads: 8 Forward time: 4.94s Backward time: 4.08s Step time: 4.11s\n", - "71664 Examples seen. Accuracy:0.7125 Error: 0.73009 Loss:0.87349 Threads: 8 Forward time: 5.00s Backward time: 4.07s Step time: 4.10s\n", - "72304 Examples seen. Accuracy:0.7147 Error: 0.82438 Loss:0.99783 Threads: 8 Forward time: 4.86s Backward time: 4.02s Step time: 4.51s\n", - "72944 Examples seen. Accuracy:0.7186 Error: 0.65873 Loss:0.81989 Threads: 8 Forward time: 4.83s Backward time: 3.98s Step time: 4.01s\n", - "73584 Examples seen. Accuracy:0.7232 Error: 0.70890 Loss:0.68759 Threads: 8 Forward time: 5.04s Backward time: 4.07s Step time: 4.11s\n", - "74224 Examples seen. Accuracy:0.7237 Error: 0.67131 Loss:0.73497 Threads: 8 Forward time: 4.82s Backward time: 4.02s Step time: 4.09s\n", - "74864 Examples seen. Accuracy:0.7241 Error: 0.83505 Loss:1.08028 Threads: 8 Forward time: 4.83s Backward time: 4.00s Step time: 4.02s\n", - "75504 Examples seen. Accuracy:0.7267 Error: 0.71332 Loss:0.80013 Threads: 8 Forward time: 4.89s Backward time: 4.01s Step time: 4.07s\n", - "76144 Examples seen. Accuracy:0.7273 Error: 0.67231 Loss:0.88565 Threads: 8 Forward time: 4.91s Backward time: 4.03s Step time: 4.10s\n", - "76784 Examples seen. Accuracy:0.7307 Error: 0.74971 Loss:0.95142 Threads: 8 Forward time: 5.02s Backward time: 4.02s Step time: 4.04s\n", - "77424 Examples seen. 
Accuracy:0.7327 Error: 0.59625 Loss:0.59369 Threads: 8 Forward time: 4.89s Backward time: 4.04s Step time: 3.99s\n", - "78064 Examples seen. Accuracy:0.7351 Error: 0.65493 Loss:0.62656 Threads: 8 Forward time: 4.94s Backward time: 4.04s Step time: 4.05s\n", - "78704 Examples seen. Accuracy:0.7381 Error: 0.60676 Loss:0.66600 Threads: 8 Forward time: 4.92s Backward time: 4.08s Step time: 4.16s\n", - "79344 Examples seen. Accuracy:0.7401 Error: 0.62631 Loss:0.77228 Threads: 8 Forward time: 4.95s Backward time: 4.02s Step time: 4.04s\n", - "79984 Examples seen. Accuracy:0.7426 Error: 0.61855 Loss:0.67665 Threads: 8 Forward time: 5.03s Backward time: 4.08s Step time: 4.01s\n", - "80624 Examples seen. Accuracy:0.7413 Error: 0.75523 Loss:0.81710 Threads: 8 Forward time: 4.98s Backward time: 4.06s Step time: 4.04s\n", - "81264 Examples seen. Accuracy:0.7437 Error: 0.65251 Loss:0.87765 Threads: 8 Forward time: 4.96s Backward time: 4.02s Step time: 4.20s\n", - "81904 Examples seen. Accuracy:0.7470 Error: 0.73352 Loss:0.74941 Threads: 8 Forward time: 4.89s Backward time: 4.02s Step time: 4.12s\n", - "82544 Examples seen. Accuracy:0.7486 Error: 0.51316 Loss:0.66584 Threads: 8 Forward time: 4.86s Backward time: 3.98s Step time: 3.99s\n", - "83184 Examples seen. Accuracy:0.7531 Error: 0.52528 Loss:0.55559 Threads: 8 Forward time: 4.98s Backward time: 4.05s Step time: 4.88s\n", - "83824 Examples seen. Accuracy:0.7534 Error: 0.58062 Loss:0.59756 Threads: 8 Forward time: 4.84s Backward time: 4.00s Step time: 4.05s\n", - "84464 Examples seen. Accuracy:0.7512 Error: 0.70739 Loss:1.04535 Threads: 8 Forward time: 4.87s Backward time: 3.98s Step time: 4.00s\n", - "85104 Examples seen. Accuracy:0.7514 Error: 0.71399 Loss:0.82362 Threads: 8 Forward time: 4.91s Backward time: 3.94s Step time: 4.08s\n", - "85744 Examples seen. Accuracy:0.7526 Error: 0.70287 Loss:0.81352 Threads: 8 Forward time: 4.86s Backward time: 3.96s Step time: 4.05s\n", - "86384 Examples seen. 
Accuracy:0.7515 Error: 0.54848 Loss:0.53062 Threads: 8 Forward time: 4.84s Backward time: 4.00s Step time: 4.03s\n", - "87024 Examples seen. Accuracy:0.7491 Error: 0.86166 Loss:1.14836 Threads: 8 Forward time: 4.82s Backward time: 3.99s Step time: 4.10s\n", - "87664 Examples seen. Accuracy:0.7463 Error: 0.62805 Loss:0.71955 Threads: 8 Forward time: 4.79s Backward time: 3.92s Step time: 4.02s\n", - "88304 Examples seen. Accuracy:0.7449 Error: 0.74902 Loss:0.88322 Threads: 8 Forward time: 4.82s Backward time: 3.98s Step time: 4.06s\n", - "88944 Examples seen. Accuracy:0.7449 Error: 0.55448 Loss:0.55602 Threads: 8 Forward time: 4.93s Backward time: 3.99s Step time: 4.05s\n", - "89584 Examples seen. Accuracy:0.7445 Error: 0.62960 Loss:0.63979 Threads: 8 Forward time: 4.85s Backward time: 3.97s Step time: 4.01s\n", - "90224 Examples seen. Accuracy:0.7453 Error: 0.61505 Loss:0.71534 Threads: 8 Forward time: 5.05s Backward time: 3.99s Step time: 4.29s\n", - "90864 Examples seen. Accuracy:0.7457 Error: 0.71500 Loss:0.84404 Threads: 8 Forward time: 5.09s Backward time: 4.00s Step time: 4.14s\n", - "91504 Examples seen. Accuracy:0.7477 Error: 0.67190 Loss:0.85606 Threads: 8 Forward time: 5.11s Backward time: 4.02s Step time: 4.09s\n", - "92144 Examples seen. Accuracy:0.7488 Error: 0.52931 Loss:0.51678 Threads: 8 Forward time: 5.07s Backward time: 3.99s Step time: 4.05s\n", - "92784 Examples seen. Accuracy:0.7503 Error: 0.58245 Loss:0.64336 Threads: 8 Forward time: 5.06s Backward time: 3.99s Step time: 4.05s\n", - "93424 Examples seen. Accuracy:0.7540 Error: 0.61066 Loss:0.64922 Threads: 8 Forward time: 5.10s Backward time: 3.96s Step time: 4.05s\n", - "94064 Examples seen. Accuracy:0.7587 Error: 0.60802 Loss:0.67975 Threads: 8 Forward time: 5.07s Backward time: 3.99s Step time: 4.05s\n", - "94704 Examples seen. Accuracy:0.7574 Error: 0.76649 Loss:0.90848 Threads: 8 Forward time: 4.99s Backward time: 3.98s Step time: 4.03s\n", - "95344 Examples seen. 
Accuracy:0.7581 Error: 0.44448 Loss:0.47948 Threads: 8 Forward time: 5.03s Backward time: 3.97s Step time: 3.97s\n", - "95984 Examples seen. Accuracy:0.7602 Error: 0.60725 Loss:0.73486 Threads: 8 Forward time: 4.99s Backward time: 3.95s Step time: 3.99s\n", - "96624 Examples seen. Accuracy:0.7635 Error: 0.46171 Loss:0.54300 Threads: 8 Forward time: 5.00s Backward time: 3.94s Step time: 3.98s\n", - "97264 Examples seen. Accuracy:0.7632 Error: 0.63462 Loss:0.86096 Threads: 8 Forward time: 4.97s Backward time: 3.95s Step time: 3.99s\n", - "97904 Examples seen. Accuracy:0.7615 Error: 0.72525 Loss:0.77724 Threads: 8 Forward time: 5.00s Backward time: 3.93s Step time: 4.04s\n", - "98544 Examples seen. Accuracy:0.7618 Error: 0.58084 Loss:0.56447 Threads: 8 Forward time: 5.03s Backward time: 3.95s Step time: 4.03s\n", - "99184 Examples seen. Accuracy:0.7626 Error: 0.47908 Loss:0.57243 Threads: 8 Forward time: 4.90s Backward time: 3.89s Step time: 4.41s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 2 Examples seen:99808 Validation Accuracy: 0.7913 Validation Error: 0.6032 Validation Loss: 0.6920 Total time: 11.94min\n", - "Epoch time: 5.7 minutes. 100 epochs: 9.6 hours.\n", - "Epochs: 2. Working time: 0.2 hours.\n", - "100448 Examples seen. Accuracy:0.7663 Error: 0.54884 Loss:0.51615 Threads: 8 Forward time: 4.92s Backward time: 3.87s Step time: 3.95s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "101088 Examples seen. Accuracy:0.7685 Error: 0.52692 Loss:0.56441 Threads: 8 Forward time: 4.96s Backward time: 3.90s Step time: 3.95s\n", - "101728 Examples seen. Accuracy:0.7693 Error: 0.64810 Loss:0.80685 Threads: 8 Forward time: 5.08s Backward time: 4.04s Step time: 4.36s\n", - "102368 Examples seen. Accuracy:0.7656 Error: 0.73363 Loss:0.84176 Threads: 8 Forward time: 4.95s Backward time: 3.93s Step time: 3.99s\n", - "103008 Examples seen. 
Accuracy:0.7651 Error: 0.54348 Loss:0.65500 Threads: 8 Forward time: 4.93s Backward time: 3.90s Step time: 3.93s\n", - "103648 Examples seen. Accuracy:0.7665 Error: 0.51321 Loss:0.50862 Threads: 8 Forward time: 4.92s Backward time: 3.90s Step time: 3.94s\n", - "104288 Examples seen. Accuracy:0.7681 Error: 0.66848 Loss:0.74828 Threads: 8 Forward time: 4.98s Backward time: 3.93s Step time: 3.97s\n", - "104928 Examples seen. Accuracy:0.7679 Error: 0.67335 Loss:0.76467 Threads: 8 Forward time: 4.91s Backward time: 3.91s Step time: 4.01s\n", - "105568 Examples seen. Accuracy:0.7652 Error: 0.70474 Loss:0.73804 Threads: 8 Forward time: 4.95s Backward time: 3.92s Step time: 3.99s\n", - "106208 Examples seen. Accuracy:0.7637 Error: 0.71777 Loss:0.84559 Threads: 8 Forward time: 5.18s Backward time: 4.09s Step time: 3.93s\n", - "106848 Examples seen. Accuracy:0.7608 Error: 0.76632 Loss:0.91273 Threads: 8 Forward time: 5.09s Backward time: 3.94s Step time: 3.98s\n", - "107488 Examples seen. Accuracy:0.7618 Error: 0.58933 Loss:0.64879 Threads: 8 Forward time: 4.97s Backward time: 3.87s Step time: 3.96s\n", - "108128 Examples seen. Accuracy:0.7678 Error: 0.54975 Loss:0.76419 Threads: 8 Forward time: 4.93s Backward time: 3.90s Step time: 4.01s\n", - "108768 Examples seen. Accuracy:0.7701 Error: 0.62359 Loss:0.68389 Threads: 8 Forward time: 4.98s Backward time: 3.89s Step time: 3.98s\n", - "109408 Examples seen. Accuracy:0.7698 Error: 0.50841 Loss:0.60319 Threads: 8 Forward time: 4.95s Backward time: 3.84s Step time: 3.93s\n", - "110048 Examples seen. Accuracy:0.7691 Error: 0.68008 Loss:0.77614 Threads: 8 Forward time: 4.93s Backward time: 3.87s Step time: 3.93s\n", - "110688 Examples seen. Accuracy:0.7697 Error: 0.59959 Loss:0.65718 Threads: 8 Forward time: 4.94s Backward time: 3.90s Step time: 3.96s\n", - "111328 Examples seen. Accuracy:0.7709 Error: 0.64883 Loss:0.79060 Threads: 8 Forward time: 4.95s Backward time: 3.88s Step time: 3.94s\n", - "111968 Examples seen. 
Accuracy:0.7724 Error: 0.68143 Loss:0.92520 Threads: 8 Forward time: 4.91s Backward time: 3.92s Step time: 3.93s\n", - "112608 Examples seen. Accuracy:0.7744 Error: 0.65300 Loss:0.76157 Threads: 8 Forward time: 4.97s Backward time: 3.87s Step time: 3.95s\n", - "113248 Examples seen. Accuracy:0.7767 Error: 0.29263 Loss:0.33656 Threads: 8 Forward time: 4.92s Backward time: 3.88s Step time: 3.97s\n", - "113888 Examples seen. Accuracy:0.7770 Error: 0.56979 Loss:0.60193 Threads: 8 Forward time: 4.97s Backward time: 3.85s Step time: 3.96s\n", - "114528 Examples seen. Accuracy:0.7797 Error: 0.51112 Loss:0.57280 Threads: 8 Forward time: 4.96s Backward time: 3.88s Step time: 3.97s\n", - "115168 Examples seen. Accuracy:0.7802 Error: 0.46003 Loss:0.53958 Threads: 8 Forward time: 4.96s Backward time: 3.89s Step time: 3.96s\n", - "115808 Examples seen. Accuracy:0.7816 Error: 0.52953 Loss:0.63115 Threads: 8 Forward time: 4.96s Backward time: 3.85s Step time: 3.94s\n", - "116448 Examples seen. Accuracy:0.7825 Error: 0.69798 Loss:0.81466 Threads: 8 Forward time: 4.98s Backward time: 3.90s Step time: 3.93s\n", - "117088 Examples seen. Accuracy:0.7848 Error: 0.57015 Loss:0.72793 Threads: 8 Forward time: 4.90s Backward time: 3.85s Step time: 3.97s\n", - "117728 Examples seen. Accuracy:0.7840 Error: 0.72595 Loss:0.92784 Threads: 8 Forward time: 4.93s Backward time: 3.85s Step time: 3.83s\n", - "118368 Examples seen. Accuracy:0.7844 Error: 0.55089 Loss:0.65124 Threads: 8 Forward time: 4.89s Backward time: 3.85s Step time: 3.84s\n", - "119008 Examples seen. Accuracy:0.7865 Error: 0.63172 Loss:0.71128 Threads: 8 Forward time: 4.88s Backward time: 3.82s Step time: 3.85s\n", - "119648 Examples seen. Accuracy:0.7869 Error: 0.58494 Loss:0.66271 Threads: 8 Forward time: 4.97s Backward time: 3.85s Step time: 3.86s\n", - "120288 Examples seen. Accuracy:0.7882 Error: 0.35686 Loss:0.35653 Threads: 8 Forward time: 4.96s Backward time: 3.86s Step time: 3.94s\n", - "120928 Examples seen. 
Accuracy:0.7877 Error: 0.62645 Loss:0.75354 Threads: 8 Forward time: 4.96s Backward time: 3.82s Step time: 3.90s\n", - "121568 Examples seen. Accuracy:0.7866 Error: 0.65171 Loss:0.66633 Threads: 8 Forward time: 4.98s Backward time: 3.86s Step time: 4.00s\n", - "122208 Examples seen. Accuracy:0.7883 Error: 0.58689 Loss:0.55934 Threads: 8 Forward time: 4.91s Backward time: 3.85s Step time: 4.33s\n", - "122848 Examples seen. Accuracy:0.7905 Error: 0.50794 Loss:0.65584 Threads: 8 Forward time: 4.90s Backward time: 3.83s Step time: 3.88s\n", - "123488 Examples seen. Accuracy:0.7889 Error: 0.66883 Loss:0.73745 Threads: 8 Forward time: 4.94s Backward time: 3.82s Step time: 3.91s\n", - "124128 Examples seen. Accuracy:0.7890 Error: 0.58316 Loss:0.66858 Threads: 8 Forward time: 4.93s Backward time: 3.81s Step time: 3.85s\n", - "124768 Examples seen. Accuracy:0.7876 Error: 0.60451 Loss:0.64783 Threads: 8 Forward time: 4.96s Backward time: 3.81s Step time: 3.91s\n", - "125408 Examples seen. Accuracy:0.7894 Error: 0.39372 Loss:0.33823 Threads: 8 Forward time: 4.92s Backward time: 3.81s Step time: 3.82s\n", - "126048 Examples seen. Accuracy:0.7903 Error: 0.58704 Loss:0.78061 Threads: 8 Forward time: 4.90s Backward time: 3.82s Step time: 3.90s\n", - "126688 Examples seen. Accuracy:0.7926 Error: 0.38584 Loss:0.52507 Threads: 8 Forward time: 4.97s Backward time: 3.81s Step time: 3.91s\n", - "127328 Examples seen. Accuracy:0.7924 Error: 0.61157 Loss:0.86701 Threads: 8 Forward time: 4.90s Backward time: 3.83s Step time: 3.87s\n", - "127968 Examples seen. Accuracy:0.7938 Error: 0.59949 Loss:0.58580 Threads: 8 Forward time: 4.91s Backward time: 3.79s Step time: 3.86s\n", - "128608 Examples seen. Accuracy:0.7942 Error: 0.54707 Loss:0.78196 Threads: 8 Forward time: 4.95s Backward time: 3.82s Step time: 3.86s\n", - "129248 Examples seen. Accuracy:0.7953 Error: 0.55767 Loss:0.65099 Threads: 8 Forward time: 4.91s Backward time: 3.81s Step time: 3.87s\n", - "129888 Examples seen. 
Accuracy:0.7952 Error: 0.51269 Loss:0.60067 Threads: 8 Forward time: 4.91s Backward time: 3.76s Step time: 3.87s\n", - "130528 Examples seen. Accuracy:0.7939 Error: 0.59289 Loss:0.70031 Threads: 8 Forward time: 4.94s Backward time: 3.81s Step time: 3.86s\n", - "131168 Examples seen. Accuracy:0.7934 Error: 0.58187 Loss:0.67381 Threads: 8 Forward time: 4.92s Backward time: 3.81s Step time: 3.87s\n", - "131808 Examples seen. Accuracy:0.7942 Error: 0.53611 Loss:0.50868 Threads: 8 Forward time: 4.90s Backward time: 3.75s Step time: 3.85s\n", - "132448 Examples seen. Accuracy:0.7953 Error: 0.54292 Loss:0.63923 Threads: 8 Forward time: 4.93s Backward time: 3.79s Step time: 3.87s\n", - "133088 Examples seen. Accuracy:0.7920 Error: 0.76131 Loss:0.91943 Threads: 8 Forward time: 4.92s Backward time: 3.77s Step time: 3.89s\n", - "133728 Examples seen. Accuracy:0.7912 Error: 0.81032 Loss:1.00075 Threads: 8 Forward time: 4.95s Backward time: 3.76s Step time: 3.91s\n", - "134368 Examples seen. Accuracy:0.7911 Error: 0.59060 Loss:0.63550 Threads: 8 Forward time: 5.03s Backward time: 3.84s Step time: 3.93s\n", - "135008 Examples seen. Accuracy:0.7904 Error: 0.56258 Loss:0.72938 Threads: 8 Forward time: 4.94s Backward time: 3.78s Step time: 3.90s\n", - "135648 Examples seen. Accuracy:0.7923 Error: 0.60513 Loss:0.79000 Threads: 8 Forward time: 4.94s Backward time: 3.77s Step time: 3.89s\n", - "136288 Examples seen. Accuracy:0.7913 Error: 0.64303 Loss:0.67180 Threads: 8 Forward time: 4.95s Backward time: 3.79s Step time: 3.91s\n", - "136928 Examples seen. Accuracy:0.7932 Error: 0.52664 Loss:0.60806 Threads: 8 Forward time: 5.03s Backward time: 3.79s Step time: 3.91s\n", - "137568 Examples seen. Accuracy:0.7932 Error: 0.44781 Loss:0.44149 Threads: 8 Forward time: 4.92s Backward time: 3.78s Step time: 3.89s\n", - "138208 Examples seen. 
Accuracy:0.7932 Error: 0.54784 Loss:0.62629 Threads: 8 Forward time: 4.95s Backward time: 3.77s Step time: 3.90s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "138848 Examples seen. Accuracy:0.7925 Error: 0.53230 Loss:0.58262 Threads: 8 Forward time: 4.91s Backward time: 3.78s Step time: 3.94s\n", - "139488 Examples seen. Accuracy:0.7935 Error: 0.44143 Loss:0.56986 Threads: 8 Forward time: 4.91s Backward time: 3.76s Step time: 3.91s\n", - "140128 Examples seen. Accuracy:0.7932 Error: 0.60223 Loss:0.62486 Threads: 8 Forward time: 4.95s Backward time: 3.77s Step time: 3.90s\n", - "140768 Examples seen. Accuracy:0.7972 Error: 0.43024 Loss:0.37385 Threads: 8 Forward time: 4.92s Backward time: 3.76s Step time: 3.92s\n", - "141408 Examples seen. Accuracy:0.7999 Error: 0.64740 Loss:0.74151 Threads: 8 Forward time: 4.91s Backward time: 3.80s Step time: 3.90s\n", - "142048 Examples seen. Accuracy:0.8018 Error: 0.46514 Loss:0.64103 Threads: 8 Forward time: 5.02s Backward time: 3.81s Step time: 3.91s\n", - "142688 Examples seen. Accuracy:0.8055 Error: 0.46350 Loss:0.49218 Threads: 8 Forward time: 5.05s Backward time: 3.81s Step time: 3.95s\n", - "143328 Examples seen. Accuracy:0.8052 Error: 0.44838 Loss:0.43140 Threads: 8 Forward time: 4.99s Backward time: 3.78s Step time: 3.93s\n", - "143968 Examples seen. Accuracy:0.8067 Error: 0.45505 Loss:0.45727 Threads: 8 Forward time: 4.95s Backward time: 3.76s Step time: 3.89s\n", - "144608 Examples seen. Accuracy:0.8067 Error: 0.60553 Loss:0.92126 Threads: 8 Forward time: 4.94s Backward time: 3.75s Step time: 3.90s\n", - "145248 Examples seen. Accuracy:0.8047 Error: 0.58275 Loss:0.61992 Threads: 8 Forward time: 4.91s Backward time: 3.77s Step time: 3.88s\n", - "145888 Examples seen. Accuracy:0.8063 Error: 0.65016 Loss:0.74546 Threads: 8 Forward time: 4.94s Backward time: 3.74s Step time: 3.98s\n", - "146528 Examples seen. 
Accuracy:0.8090 Error: 0.49883 Loss:0.48952 Threads: 8 Forward time: 5.04s Backward time: 3.78s Step time: 3.88s\n", - "147168 Examples seen. Accuracy:0.8094 Error: 0.72466 Loss:0.94149 Threads: 8 Forward time: 4.99s Backward time: 3.75s Step time: 3.90s\n", - "147808 Examples seen. Accuracy:0.8101 Error: 0.43779 Loss:0.39046 Threads: 8 Forward time: 5.00s Backward time: 3.77s Step time: 3.90s\n", - "148448 Examples seen. Accuracy:0.8103 Error: 0.47540 Loss:0.51732 Threads: 8 Forward time: 5.01s Backward time: 3.73s Step time: 3.91s\n", - "149088 Examples seen. Accuracy:0.8125 Error: 0.61349 Loss:0.62172 Threads: 8 Forward time: 5.03s Backward time: 3.77s Step time: 3.91s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 3 Examples seen:149712 Validation Accuracy: 0.8145 Validation Error: 0.5479 Validation Loss: 0.6061 Total time: 17.52min\n", - "Epoch time: 5.1 minutes. 100 epochs: 8.5 hours.\n", - "Epochs: 3. Working time: 0.29 hours.\n", - "150352 Examples seen. Accuracy:0.8192 Error: 0.39216 Loss:0.43250 Threads: 8 Forward time: 5.00s Backward time: 3.74s Step time: 3.89s\n", - "150992 Examples seen. Accuracy:0.8173 Error: 0.50644 Loss:0.56953 Threads: 8 Forward time: 4.89s Backward time: 3.75s Step time: 3.86s\n", - "151632 Examples seen. Accuracy:0.8200 Error: 0.41418 Loss:0.52233 Threads: 8 Forward time: 4.90s Backward time: 3.72s Step time: 3.82s\n", - "152272 Examples seen. Accuracy:0.8235 Error: 0.45752 Loss:0.45199 Threads: 8 Forward time: 4.90s Backward time: 3.71s Step time: 3.82s\n", - "152912 Examples seen. Accuracy:0.8249 Error: 0.52087 Loss:0.82781 Threads: 8 Forward time: 4.91s Backward time: 3.72s Step time: 3.81s\n", - "153552 Examples seen. Accuracy:0.8240 Error: 0.53475 Loss:0.59838 Threads: 8 Forward time: 4.93s Backward time: 3.71s Step time: 3.82s\n", - "154192 Examples seen. 
Accuracy:0.8233 Error: 0.55490 Loss:0.79987 Threads: 8 Forward time: 4.92s Backward time: 3.68s Step time: 3.80s\n", - "154832 Examples seen. Accuracy:0.8238 Error: 0.58514 Loss:0.67710 Threads: 8 Forward time: 4.92s Backward time: 3.68s Step time: 3.82s\n", - "155472 Examples seen. Accuracy:0.8237 Error: 0.53179 Loss:0.49336 Threads: 8 Forward time: 4.96s Backward time: 3.73s Step time: 3.83s\n", - "156112 Examples seen. Accuracy:0.8234 Error: 0.44285 Loss:0.52057 Threads: 8 Forward time: 4.92s Backward time: 3.72s Step time: 3.84s\n", - "156752 Examples seen. Accuracy:0.8267 Error: 0.47392 Loss:0.62191 Threads: 8 Forward time: 4.93s Backward time: 3.76s Step time: 3.82s\n", - "157392 Examples seen. Accuracy:0.8227 Error: 0.60360 Loss:0.71726 Threads: 8 Forward time: 5.02s Backward time: 3.75s Step time: 3.84s\n", - "158032 Examples seen. Accuracy:0.8223 Error: 0.58812 Loss:0.65584 Threads: 8 Forward time: 4.92s Backward time: 3.71s Step time: 3.84s\n", - "158672 Examples seen. Accuracy:0.8214 Error: 0.50556 Loss:0.51089 Threads: 8 Forward time: 4.96s Backward time: 3.78s Step time: 3.83s\n", - "159312 Examples seen. Accuracy:0.8203 Error: 0.54778 Loss:0.63136 Threads: 8 Forward time: 4.91s Backward time: 3.71s Step time: 3.83s\n", - "159952 Examples seen. Accuracy:0.8208 Error: 0.52241 Loss:0.57624 Threads: 8 Forward time: 4.88s Backward time: 3.72s Step time: 3.82s\n", - "160592 Examples seen. Accuracy:0.8207 Error: 0.53627 Loss:0.59279 Threads: 8 Forward time: 4.96s Backward time: 3.74s Step time: 3.82s\n", - "161232 Examples seen. Accuracy:0.8193 Error: 0.60742 Loss:0.69559 Threads: 8 Forward time: 4.95s Backward time: 3.75s Step time: 3.82s\n", - "161872 Examples seen. Accuracy:0.8202 Error: 0.42806 Loss:0.45953 Threads: 8 Forward time: 4.89s Backward time: 3.75s Step time: 3.83s\n", - "162512 Examples seen. Accuracy:0.8204 Error: 0.35194 Loss:0.29541 Threads: 8 Forward time: 4.92s Backward time: 3.71s Step time: 3.81s\n", - "163152 Examples seen. 
Accuracy:0.8210 Error: 0.36569 Loss:0.32161 Threads: 8 Forward time: 4.89s Backward time: 3.73s Step time: 3.83s\n", - "163792 Examples seen. Accuracy:0.8209 Error: 0.54824 Loss:0.51071 Threads: 8 Forward time: 4.91s Backward time: 3.71s Step time: 3.80s\n", - "164432 Examples seen. Accuracy:0.8245 Error: 0.40903 Loss:0.52589 Threads: 8 Forward time: 4.89s Backward time: 3.71s Step time: 3.81s\n", - "165072 Examples seen. Accuracy:0.8250 Error: 0.47508 Loss:0.55595 Threads: 8 Forward time: 4.93s Backward time: 3.69s Step time: 3.82s\n", - "165712 Examples seen. Accuracy:0.8256 Error: 0.41772 Loss:0.44002 Threads: 8 Forward time: 4.98s Backward time: 3.74s Step time: 3.84s\n", - "166352 Examples seen. Accuracy:0.8258 Error: 0.55670 Loss:0.67870 Threads: 8 Forward time: 4.94s Backward time: 3.75s Step time: 3.86s\n", - "166992 Examples seen. Accuracy:0.8264 Error: 0.55651 Loss:0.66158 Threads: 8 Forward time: 4.97s Backward time: 3.74s Step time: 3.84s\n", - "167632 Examples seen. Accuracy:0.8291 Error: 0.38208 Loss:0.38273 Threads: 8 Forward time: 5.00s Backward time: 3.74s Step time: 3.86s\n", - "168272 Examples seen. Accuracy:0.8282 Error: 0.69786 Loss:0.82405 Threads: 8 Forward time: 4.98s Backward time: 3.73s Step time: 3.90s\n", - "168912 Examples seen. Accuracy:0.8275 Error: 0.47987 Loss:0.50884 Threads: 8 Forward time: 4.94s Backward time: 3.74s Step time: 3.90s\n", - "169552 Examples seen. Accuracy:0.8271 Error: 0.55535 Loss:0.57426 Threads: 8 Forward time: 4.94s Backward time: 3.72s Step time: 3.89s\n", - "170192 Examples seen. Accuracy:0.8283 Error: 0.59648 Loss:0.75638 Threads: 8 Forward time: 5.09s Backward time: 3.74s Step time: 3.91s\n", - "170832 Examples seen. Accuracy:0.8284 Error: 0.45573 Loss:0.40775 Threads: 8 Forward time: 4.97s Backward time: 3.75s Step time: 3.90s\n", - "171472 Examples seen. Accuracy:0.8276 Error: 0.66652 Loss:0.89765 Threads: 8 Forward time: 4.97s Backward time: 3.77s Step time: 3.90s\n", - "172112 Examples seen. 
Accuracy:0.8276 Error: 0.60761 Loss:0.80383 Threads: 8 Forward time: 4.98s Backward time: 3.74s Step time: 3.89s\n", - "172752 Examples seen. Accuracy:0.8279 Error: 0.33955 Loss:0.31424 Threads: 8 Forward time: 5.02s Backward time: 3.70s Step time: 3.89s\n", - "173392 Examples seen. Accuracy:0.8285 Error: 0.47791 Loss:0.57737 Threads: 8 Forward time: 5.01s Backward time: 3.70s Step time: 3.89s\n", - "174032 Examples seen. Accuracy:0.8277 Error: 0.56284 Loss:0.63809 Threads: 8 Forward time: 4.92s Backward time: 3.72s Step time: 4.27s\n", - "174672 Examples seen. Accuracy:0.8274 Error: 0.46905 Loss:0.63722 Threads: 8 Forward time: 4.92s Backward time: 3.72s Step time: 3.78s\n", - "175312 Examples seen. Accuracy:0.8308 Error: 0.40847 Loss:0.49759 Threads: 8 Forward time: 4.89s Backward time: 3.72s Step time: 3.78s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "175952 Examples seen. Accuracy:0.8299 Error: 0.49338 Loss:0.63578 Threads: 8 Forward time: 4.90s Backward time: 3.72s Step time: 3.78s\n", - "176592 Examples seen. Accuracy:0.8316 Error: 0.42440 Loss:0.40695 Threads: 8 Forward time: 4.91s Backward time: 3.72s Step time: 3.78s\n", - "177232 Examples seen. Accuracy:0.8304 Error: 0.47983 Loss:0.89970 Threads: 8 Forward time: 4.92s Backward time: 3.70s Step time: 3.78s\n", - "177872 Examples seen. Accuracy:0.8294 Error: 0.43695 Loss:0.46112 Threads: 8 Forward time: 4.93s Backward time: 3.68s Step time: 3.79s\n", - "178512 Examples seen. Accuracy:0.8313 Error: 0.46624 Loss:0.46886 Threads: 8 Forward time: 4.91s Backward time: 3.69s Step time: 3.77s\n", - "179152 Examples seen. Accuracy:0.8285 Error: 0.51522 Loss:0.62947 Threads: 8 Forward time: 4.90s Backward time: 3.68s Step time: 3.78s\n", - "179792 Examples seen. Accuracy:0.8291 Error: 0.49048 Loss:0.63040 Threads: 8 Forward time: 4.91s Backward time: 3.70s Step time: 3.77s\n", - "180432 Examples seen. 
Accuracy:0.8286 Error: 0.43082 Loss:0.45422 Threads: 8 Forward time: 4.88s Backward time: 3.66s Step time: 3.76s\n", - "181072 Examples seen. Accuracy:0.8300 Error: 0.26881 Loss:0.25330 Threads: 8 Forward time: 4.91s Backward time: 3.70s Step time: 3.77s\n", - "181712 Examples seen. Accuracy:0.8288 Error: 0.67797 Loss:0.73267 Threads: 8 Forward time: 4.90s Backward time: 3.68s Step time: 3.75s\n", - "182352 Examples seen. Accuracy:0.8250 Error: 0.43118 Loss:0.47404 Threads: 8 Forward time: 4.92s Backward time: 3.72s Step time: 3.79s\n", - "182992 Examples seen. Accuracy:0.8237 Error: 0.71151 Loss:0.96095 Threads: 8 Forward time: 4.97s Backward time: 3.65s Step time: 3.81s\n", - "183632 Examples seen. Accuracy:0.8236 Error: 0.51908 Loss:0.66391 Threads: 8 Forward time: 4.92s Backward time: 3.62s Step time: 3.79s\n", - "184272 Examples seen. Accuracy:0.8243 Error: 0.50184 Loss:0.55106 Threads: 8 Forward time: 4.88s Backward time: 3.65s Step time: 3.77s\n", - "184912 Examples seen. Accuracy:0.8263 Error: 0.36270 Loss:0.37580 Threads: 8 Forward time: 4.92s Backward time: 3.65s Step time: 3.75s\n", - "185552 Examples seen. Accuracy:0.8250 Error: 0.59277 Loss:0.83979 Threads: 8 Forward time: 8.22s Backward time: 5.47s Step time: 4.10s\n", - "186192 Examples seen. Accuracy:0.8270 Error: 0.41037 Loss:0.42193 Threads: 8 Forward time: 4.91s Backward time: 3.67s Step time: 3.79s\n", - "186832 Examples seen. Accuracy:0.8288 Error: 0.39750 Loss:0.36253 Threads: 8 Forward time: 4.92s Backward time: 3.67s Step time: 3.78s\n", - "187472 Examples seen. Accuracy:0.8324 Error: 0.38362 Loss:0.48083 Threads: 8 Forward time: 4.88s Backward time: 3.65s Step time: 3.79s\n", - "188112 Examples seen. Accuracy:0.8322 Error: 0.42722 Loss:0.44788 Threads: 8 Forward time: 4.92s Backward time: 3.61s Step time: 3.80s\n", - "188752 Examples seen. Accuracy:0.8327 Error: 0.38282 Loss:0.32241 Threads: 8 Forward time: 4.96s Backward time: 3.63s Step time: 3.77s\n", - "189392 Examples seen. 
Accuracy:0.8343 Error: 0.43995 Loss:0.48567 Threads: 8 Forward time: 4.92s Backward time: 3.63s Step time: 3.78s\n", - "190032 Examples seen. Accuracy:0.8346 Error: 0.40010 Loss:0.34967 Threads: 8 Forward time: 4.90s Backward time: 3.63s Step time: 3.77s\n", - "190672 Examples seen. Accuracy:0.8348 Error: 0.31902 Loss:0.36368 Threads: 8 Forward time: 4.91s Backward time: 3.64s Step time: 3.77s\n", - "191312 Examples seen. Accuracy:0.8367 Error: 0.38642 Loss:0.43329 Threads: 8 Forward time: 4.91s Backward time: 3.67s Step time: 3.79s\n", - "191952 Examples seen. Accuracy:0.8349 Error: 0.48097 Loss:0.70807 Threads: 8 Forward time: 4.90s Backward time: 3.65s Step time: 3.78s\n", - "192592 Examples seen. Accuracy:0.8365 Error: 0.35821 Loss:0.43316 Threads: 8 Forward time: 4.92s Backward time: 3.63s Step time: 3.79s\n", - "193232 Examples seen. Accuracy:0.8345 Error: 0.42352 Loss:0.42006 Threads: 8 Forward time: 4.94s Backward time: 3.58s Step time: 3.78s\n", - "193872 Examples seen. Accuracy:0.8347 Error: 0.47412 Loss:0.54623 Threads: 8 Forward time: 4.91s Backward time: 3.64s Step time: 3.77s\n", - "194512 Examples seen. Accuracy:0.8364 Error: 0.55156 Loss:0.53484 Threads: 8 Forward time: 5.10s Backward time: 3.69s Step time: 3.80s\n", - "195152 Examples seen. Accuracy:0.8372 Error: 0.50308 Loss:0.51425 Threads: 8 Forward time: 4.93s Backward time: 3.61s Step time: 3.81s\n", - "195792 Examples seen. Accuracy:0.8387 Error: 0.37102 Loss:0.31270 Threads: 8 Forward time: 5.04s Backward time: 3.68s Step time: 3.80s\n", - "196432 Examples seen. Accuracy:0.8386 Error: 0.35134 Loss:0.44519 Threads: 8 Forward time: 4.93s Backward time: 3.65s Step time: 3.79s\n", - "197072 Examples seen. Accuracy:0.8416 Error: 0.32214 Loss:0.37739 Threads: 8 Forward time: 4.94s Backward time: 3.62s Step time: 3.79s\n", - "197712 Examples seen. Accuracy:0.8406 Error: 0.34171 Loss:0.43089 Threads: 8 Forward time: 4.91s Backward time: 3.63s Step time: 3.78s\n", - "198352 Examples seen. 
Accuracy:0.8400 Error: 0.47906 Loss:0.63858 Threads: 8 Forward time: 4.91s Backward time: 3.63s Step time: 3.81s\n", - "198992 Examples seen. Accuracy:0.8406 Error: 0.41359 Loss:0.45117 Threads: 8 Forward time: 5.00s Backward time: 3.66s Step time: 3.84s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 4 Examples seen:199616 Validation Accuracy: 0.8388 Validation Error: 0.4966 Validation Loss: 0.5339 Total time: 22.97min\n", - "Epoch time: 5 minutes. 100 epochs: 8.3 hours.\n", - "Epochs: 4. Working time: 0.38 hours.\n", - "200256 Examples seen. Accuracy:0.8431 Error: 0.47344 Loss:0.42972 Threads: 8 Forward time: 4.89s Backward time: 3.66s Step time: 3.80s\n", - "200896 Examples seen. Accuracy:0.8464 Error: 0.31460 Loss:0.25577 Threads: 8 Forward time: 5.03s Backward time: 3.74s Step time: 3.82s\n", - "201536 Examples seen. Accuracy:0.8462 Error: 0.64650 Loss:0.69419 Threads: 8 Forward time: 4.92s Backward time: 3.64s Step time: 3.81s\n", - "202176 Examples seen. Accuracy:0.8464 Error: 0.36548 Loss:0.38364 Threads: 8 Forward time: 4.90s Backward time: 3.63s Step time: 3.82s\n", - "202816 Examples seen. Accuracy:0.8455 Error: 0.38595 Loss:0.40465 Threads: 8 Forward time: 4.96s Backward time: 3.61s Step time: 3.76s\n", - "203456 Examples seen. Accuracy:0.8436 Error: 0.40913 Loss:0.38274 Threads: 8 Forward time: 4.91s Backward time: 3.63s Step time: 3.78s\n", - "204096 Examples seen. Accuracy:0.8421 Error: 0.47723 Loss:0.51854 Threads: 8 Forward time: 4.90s Backward time: 3.60s Step time: 3.75s\n", - "204736 Examples seen. Accuracy:0.8417 Error: 0.42170 Loss:0.45629 Threads: 8 Forward time: 4.90s Backward time: 3.63s Step time: 3.78s\n", - "205376 Examples seen. Accuracy:0.8405 Error: 0.48133 Loss:0.55698 Threads: 8 Forward time: 4.92s Backward time: 3.62s Step time: 3.79s\n", - "206016 Examples seen. 
Accuracy:0.8415 Error: 0.51225 Loss:0.55395 Threads: 8 Forward time: 4.90s Backward time: 3.62s Step time: 3.78s\n", - "206656 Examples seen. Accuracy:0.8419 Error: 0.52573 Loss:0.54872 Threads: 8 Forward time: 4.91s Backward time: 3.61s Step time: 3.79s\n", - "207296 Examples seen. Accuracy:0.8407 Error: 0.46051 Loss:0.49051 Threads: 8 Forward time: 4.92s Backward time: 3.59s Step time: 3.79s\n", - "207936 Examples seen. Accuracy:0.8413 Error: 0.39777 Loss:0.45502 Threads: 8 Forward time: 4.90s Backward time: 3.59s Step time: 3.91s\n", - "208576 Examples seen. Accuracy:0.8424 Error: 0.35375 Loss:0.31353 Threads: 8 Forward time: 4.92s Backward time: 3.58s Step time: 3.84s\n", - "209216 Examples seen. Accuracy:0.8441 Error: 0.36028 Loss:0.41186 Threads: 8 Forward time: 4.91s Backward time: 3.59s Step time: 3.84s\n", - "209856 Examples seen. Accuracy:0.8442 Error: 0.46064 Loss:0.47827 Threads: 8 Forward time: 6.18s Backward time: 4.40s Step time: 4.22s\n", - "210496 Examples seen. Accuracy:0.8439 Error: 0.35342 Loss:0.30903 Threads: 8 Forward time: 4.94s Backward time: 3.66s Step time: 4.56s\n", - "211136 Examples seen. Accuracy:0.8428 Error: 0.43144 Loss:0.57266 Threads: 8 Forward time: 4.92s Backward time: 3.58s Step time: 3.75s\n", - "211776 Examples seen. Accuracy:0.8435 Error: 0.45110 Loss:0.39395 Threads: 8 Forward time: 4.92s Backward time: 3.58s Step time: 3.77s\n", - "212416 Examples seen. Accuracy:0.8431 Error: 0.29183 Loss:0.32461 Threads: 8 Forward time: 4.95s Backward time: 3.59s Step time: 3.76s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "213056 Examples seen. Accuracy:0.8425 Error: 0.44417 Loss:0.50224 Threads: 8 Forward time: 4.96s Backward time: 3.57s Step time: 3.76s\n", - "213696 Examples seen. Accuracy:0.8430 Error: 0.44293 Loss:0.53411 Threads: 8 Forward time: 4.92s Backward time: 3.55s Step time: 3.74s\n", - "214336 Examples seen. 
Accuracy:0.8440 Error: 0.66991 Loss:0.95960 Threads: 8 Forward time: 4.94s Backward time: 3.59s Step time: 3.74s\n", - "214976 Examples seen. Accuracy:0.8467 Error: 0.45656 Loss:0.44948 Threads: 8 Forward time: 4.93s Backward time: 3.56s Step time: 3.74s\n", - "215616 Examples seen. Accuracy:0.8465 Error: 0.41955 Loss:0.40876 Threads: 8 Forward time: 4.92s Backward time: 3.55s Step time: 3.73s\n", - "216256 Examples seen. Accuracy:0.8477 Error: 0.41120 Loss:0.44879 Threads: 8 Forward time: 4.93s Backward time: 3.52s Step time: 3.74s\n", - "216896 Examples seen. Accuracy:0.8490 Error: 0.33548 Loss:0.28479 Threads: 8 Forward time: 4.91s Backward time: 3.59s Step time: 3.76s\n", - "217536 Examples seen. Accuracy:0.8495 Error: 0.44548 Loss:0.53520 Threads: 8 Forward time: 4.92s Backward time: 3.60s Step time: 3.74s\n", - "218176 Examples seen. Accuracy:0.8495 Error: 0.42480 Loss:0.45789 Threads: 8 Forward time: 4.99s Backward time: 3.67s Step time: 3.76s\n", - "218816 Examples seen. Accuracy:0.8500 Error: 0.36274 Loss:0.32787 Threads: 8 Forward time: 5.00s Backward time: 3.60s Step time: 3.79s\n", - "219456 Examples seen. Accuracy:0.8476 Error: 0.46745 Loss:0.53919 Threads: 8 Forward time: 4.96s Backward time: 3.60s Step time: 3.76s\n", - "220096 Examples seen. Accuracy:0.8470 Error: 0.52822 Loss:0.65375 Threads: 8 Forward time: 4.93s Backward time: 3.56s Step time: 3.74s\n", - "220736 Examples seen. Accuracy:0.8463 Error: 0.38752 Loss:0.57444 Threads: 8 Forward time: 4.95s Backward time: 3.60s Step time: 3.75s\n", - "221376 Examples seen. Accuracy:0.8485 Error: 0.32242 Loss:0.40611 Threads: 8 Forward time: 4.94s Backward time: 3.59s Step time: 3.75s\n", - "222016 Examples seen. Accuracy:0.8511 Error: 0.36085 Loss:0.38120 Threads: 8 Forward time: 4.98s Backward time: 3.64s Step time: 3.76s\n", - "222656 Examples seen. Accuracy:0.8518 Error: 0.32258 Loss:0.28939 Threads: 8 Forward time: 4.94s Backward time: 3.58s Step time: 3.75s\n", - "223296 Examples seen. 
Accuracy:0.8504 Error: 0.54130 Loss:0.66737 Threads: 8 Forward time: 4.97s Backward time: 3.60s Step time: 3.77s\n", - "223936 Examples seen. Accuracy:0.8485 Error: 0.31742 Loss:0.31574 Threads: 8 Forward time: 4.96s Backward time: 3.56s Step time: 3.80s\n", - "224576 Examples seen. Accuracy:0.8506 Error: 0.41915 Loss:0.39697 Threads: 8 Forward time: 4.98s Backward time: 3.56s Step time: 3.76s\n", - "225216 Examples seen. Accuracy:0.8527 Error: 0.31546 Loss:0.30623 Threads: 8 Forward time: 5.04s Backward time: 3.60s Step time: 3.79s\n", - "225856 Examples seen. Accuracy:0.8511 Error: 0.46154 Loss:0.46196 Threads: 8 Forward time: 4.93s Backward time: 3.56s Step time: 3.78s\n", - "226496 Examples seen. Accuracy:0.8537 Error: 0.23920 Loss:0.17325 Threads: 8 Forward time: 5.03s Backward time: 3.58s Step time: 3.76s\n", - "227136 Examples seen. Accuracy:0.8552 Error: 0.35634 Loss:0.44460 Threads: 8 Forward time: 4.93s Backward time: 3.58s Step time: 4.35s\n", - "227776 Examples seen. Accuracy:0.8554 Error: 0.30531 Loss:0.25647 Threads: 8 Forward time: 4.91s Backward time: 3.57s Step time: 3.79s\n", - "228416 Examples seen. Accuracy:0.8555 Error: 0.30167 Loss:0.35479 Threads: 8 Forward time: 4.92s Backward time: 3.56s Step time: 3.79s\n", - "229056 Examples seen. Accuracy:0.8563 Error: 0.39529 Loss:0.40308 Threads: 8 Forward time: 5.01s Backward time: 3.60s Step time: 3.79s\n", - "229696 Examples seen. Accuracy:0.8571 Error: 0.48068 Loss:0.55256 Threads: 8 Forward time: 4.92s Backward time: 3.59s Step time: 3.80s\n", - "230336 Examples seen. Accuracy:0.8592 Error: 0.25813 Loss:0.29048 Threads: 8 Forward time: 4.93s Backward time: 3.58s Step time: 3.78s\n", - "230976 Examples seen. Accuracy:0.8586 Error: 0.32898 Loss:0.32639 Threads: 8 Forward time: 4.94s Backward time: 3.57s Step time: 3.79s\n", - "231616 Examples seen. Accuracy:0.8603 Error: 0.27744 Loss:0.30155 Threads: 8 Forward time: 4.93s Backward time: 3.54s Step time: 3.79s\n", - "232256 Examples seen. 
Accuracy:0.8605 Error: 0.38925 Loss:0.48196 Threads: 8 Forward time: 4.94s Backward time: 3.55s Step time: 3.79s\n", - "232896 Examples seen. Accuracy:0.8612 Error: 0.35306 Loss:0.36334 Threads: 8 Forward time: 4.92s Backward time: 3.57s Step time: 3.79s\n", - "233536 Examples seen. Accuracy:0.8614 Error: 0.30785 Loss:0.69586 Threads: 8 Forward time: 4.94s Backward time: 3.53s Step time: 3.80s\n", - "234176 Examples seen. Accuracy:0.8609 Error: 0.33963 Loss:0.37742 Threads: 8 Forward time: 4.93s Backward time: 3.60s Step time: 3.83s\n", - "234816 Examples seen. Accuracy:0.8608 Error: 0.41324 Loss:0.37596 Threads: 8 Forward time: 4.93s Backward time: 3.54s Step time: 3.81s\n", - "235456 Examples seen. Accuracy:0.8604 Error: 0.53449 Loss:0.65127 Threads: 8 Forward time: 4.96s Backward time: 3.55s Step time: 3.80s\n", - "236096 Examples seen. Accuracy:0.8581 Error: 0.34933 Loss:0.38058 Threads: 8 Forward time: 4.95s Backward time: 3.54s Step time: 3.87s\n", - "236736 Examples seen. Accuracy:0.8594 Error: 0.39007 Loss:0.39326 Threads: 8 Forward time: 5.02s Backward time: 3.55s Step time: 3.86s\n", - "237376 Examples seen. Accuracy:0.8580 Error: 0.46112 Loss:0.61066 Threads: 8 Forward time: 4.96s Backward time: 3.51s Step time: 3.76s\n", - "238016 Examples seen. Accuracy:0.8592 Error: 0.36977 Loss:0.41938 Threads: 8 Forward time: 4.96s Backward time: 3.54s Step time: 3.80s\n", - "238656 Examples seen. Accuracy:0.8589 Error: 0.34850 Loss:0.36165 Threads: 8 Forward time: 4.94s Backward time: 3.55s Step time: 3.81s\n", - "239296 Examples seen. Accuracy:0.8588 Error: 0.28783 Loss:0.24266 Threads: 8 Forward time: 4.94s Backward time: 3.55s Step time: 3.81s\n", - "239936 Examples seen. Accuracy:0.8589 Error: 0.43056 Loss:0.34941 Threads: 8 Forward time: 4.99s Backward time: 3.59s Step time: 3.81s\n", - "240576 Examples seen. Accuracy:0.8558 Error: 0.49845 Loss:0.62678 Threads: 8 Forward time: 5.50s Backward time: 3.89s Step time: 3.93s\n", - "241216 Examples seen. 
Accuracy:0.8551 Error: 0.27495 Loss:0.23386 Threads: 8 Forward time: 6.22s Backward time: 4.29s Step time: 5.39s\n", - "241856 Examples seen. Accuracy:0.8554 Error: 0.32550 Loss:0.33272 Threads: 8 Forward time: 5.54s Backward time: 3.94s Step time: 5.53s\n", - "242496 Examples seen. Accuracy:0.8577 Error: 0.39306 Loss:0.51455 Threads: 8 Forward time: 6.09s Backward time: 4.27s Step time: 4.71s\n", - "243136 Examples seen. Accuracy:0.8602 Error: 0.38005 Loss:0.45507 Threads: 8 Forward time: 6.67s Backward time: 4.69s Step time: 5.48s\n", - "243776 Examples seen. Accuracy:0.8616 Error: 0.30631 Loss:0.31517 Threads: 8 Forward time: 5.58s Backward time: 4.02s Step time: 5.39s\n", - "244416 Examples seen. Accuracy:0.8621 Error: 0.45326 Loss:0.48289 Threads: 8 Forward time: 6.05s Backward time: 4.18s Step time: 5.48s\n", - "245056 Examples seen. Accuracy:0.8606 Error: 0.45040 Loss:0.51158 Threads: 8 Forward time: 6.01s Backward time: 4.16s Step time: 5.62s\n", - "245696 Examples seen. Accuracy:0.8564 Error: 0.33490 Loss:0.30871 Threads: 8 Forward time: 6.69s Backward time: 4.64s Step time: 5.67s\n", - "246336 Examples seen. Accuracy:0.8551 Error: 0.47371 Loss:0.44888 Threads: 8 Forward time: 6.26s Backward time: 4.45s Step time: 5.47s\n", - "246976 Examples seen. Accuracy:0.8571 Error: 0.27831 Loss:0.27319 Threads: 8 Forward time: 5.62s Backward time: 3.95s Step time: 5.29s\n", - "247616 Examples seen. Accuracy:0.8572 Error: 0.24168 Loss:0.19918 Threads: 8 Forward time: 5.97s Backward time: 4.22s Step time: 5.21s\n", - "248256 Examples seen. Accuracy:0.8592 Error: 0.40101 Loss:0.47751 Threads: 8 Forward time: 5.83s Backward time: 4.04s Step time: 5.53s\n", - "248896 Examples seen. Accuracy:0.8572 Error: 0.49252 Loss:0.70675 Threads: 8 Forward time: 5.78s Backward time: 4.03s Step time: 5.25s\n", - "Starting Validation.\n", - "VALIDATION RECORD! 
Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 5 Examples seen:249520 Validation Accuracy: 0.8529 Validation Error: 0.4536 Validation Loss: 0.4699 Total time: 28.82min\n", - "Epoch time: 6.8 minutes. 100 epochs: 11 hours.\n", - "Epochs: 5. Working time: 0.48 hours.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "250160 Examples seen. Accuracy:0.8557 Error: 0.41416 Loss:0.61978 Threads: 8 Forward time: 6.00s Backward time: 4.18s Step time: 5.61s\n", - "250800 Examples seen. Accuracy:0.8554 Error: 0.47289 Loss:0.61539 Threads: 8 Forward time: 5.89s Backward time: 4.12s Step time: 5.41s\n", - "251440 Examples seen. Accuracy:0.8546 Error: 0.56265 Loss:0.61656 Threads: 8 Forward time: 6.12s Backward time: 4.34s Step time: 5.55s\n", - "252080 Examples seen. Accuracy:0.8535 Error: 0.40453 Loss:0.44082 Threads: 8 Forward time: 6.81s Backward time: 4.65s Step time: 5.44s\n", - "252720 Examples seen. Accuracy:0.8529 Error: 0.40592 Loss:0.45493 Threads: 8 Forward time: 6.13s Backward time: 4.28s Step time: 5.26s\n", - "253360 Examples seen. Accuracy:0.8540 Error: 0.34238 Loss:0.41759 Threads: 8 Forward time: 6.27s Backward time: 4.35s Step time: 5.52s\n", - "254000 Examples seen. Accuracy:0.8550 Error: 0.39450 Loss:0.40056 Threads: 8 Forward time: 5.68s Backward time: 4.04s Step time: 5.38s\n", - "254640 Examples seen. Accuracy:0.8573 Error: 0.49733 Loss:0.66362 Threads: 8 Forward time: 6.38s Backward time: 4.51s Step time: 5.67s\n", - "255280 Examples seen. Accuracy:0.8578 Error: 0.42118 Loss:0.43178 Threads: 8 Forward time: 5.71s Backward time: 4.07s Step time: 5.40s\n", - "255920 Examples seen. Accuracy:0.8561 Error: 0.54741 Loss:0.65239 Threads: 8 Forward time: 7.54s Backward time: 5.30s Step time: 5.40s\n", - "256560 Examples seen. Accuracy:0.8565 Error: 0.45397 Loss:0.45796 Threads: 8 Forward time: 6.71s Backward time: 4.68s Step time: 5.55s\n", - "257200 Examples seen. 
Accuracy:0.8600 Error: 0.35031 Loss:0.29598 Threads: 8 Forward time: 6.64s Backward time: 4.60s Step time: 5.31s\n", - "257840 Examples seen. Accuracy:0.8588 Error: 0.52212 Loss:0.54196 Threads: 8 Forward time: 5.80s Backward time: 3.94s Step time: 5.50s\n", - "258480 Examples seen. Accuracy:0.8585 Error: 0.33339 Loss:0.32437 Threads: 8 Forward time: 6.36s Backward time: 4.52s Step time: 5.53s\n", - "259120 Examples seen. Accuracy:0.8587 Error: 0.48267 Loss:0.75483 Threads: 8 Forward time: 6.12s Backward time: 4.23s Step time: 5.49s\n", - "259760 Examples seen. Accuracy:0.8601 Error: 0.37181 Loss:0.28587 Threads: 8 Forward time: 6.58s Backward time: 4.64s Step time: 5.53s\n", - "260400 Examples seen. Accuracy:0.8621 Error: 0.31563 Loss:0.40805 Threads: 8 Forward time: 6.36s Backward time: 4.42s Step time: 5.27s\n", - "261040 Examples seen. Accuracy:0.8622 Error: 0.33065 Loss:0.29644 Threads: 8 Forward time: 6.17s Backward time: 4.29s Step time: 5.18s\n", - "261680 Examples seen. Accuracy:0.8641 Error: 0.30131 Loss:0.29589 Threads: 8 Forward time: 6.20s Backward time: 4.41s Step time: 5.24s\n", - "262320 Examples seen. Accuracy:0.8663 Error: 0.21092 Loss:0.25346 Threads: 8 Forward time: 5.78s Backward time: 4.02s Step time: 5.01s\n", - "262960 Examples seen. Accuracy:0.8680 Error: 0.38215 Loss:0.33075 Threads: 8 Forward time: 5.40s Backward time: 3.77s Step time: 5.08s\n", - "263600 Examples seen. Accuracy:0.8682 Error: 0.26736 Loss:0.24997 Threads: 8 Forward time: 6.68s Backward time: 4.65s Step time: 5.49s\n", - "264240 Examples seen. Accuracy:0.8679 Error: 0.35094 Loss:0.32577 Threads: 8 Forward time: 6.54s Backward time: 4.44s Step time: 6.06s\n", - "264880 Examples seen. Accuracy:0.8682 Error: 0.47006 Loss:0.65329 Threads: 8 Forward time: 5.25s Backward time: 3.64s Step time: 4.91s\n", - "265520 Examples seen. Accuracy:0.8682 Error: 0.32088 Loss:0.31916 Threads: 8 Forward time: 5.68s Backward time: 3.98s Step time: 4.27s\n", - "266160 Examples seen. 
Accuracy:0.8689 Error: 0.29626 Loss:0.23896 Threads: 8 Forward time: 5.31s Backward time: 3.70s Step time: 4.93s\n", - "266800 Examples seen. Accuracy:0.8700 Error: 0.25193 Loss:0.37996 Threads: 8 Forward time: 5.42s Backward time: 3.81s Step time: 4.78s\n", - "267440 Examples seen. Accuracy:0.8717 Error: 0.21002 Loss:0.14987 Threads: 8 Forward time: 5.28s Backward time: 3.69s Step time: 4.89s\n", - "268080 Examples seen. Accuracy:0.8717 Error: 0.41192 Loss:0.49685 Threads: 8 Forward time: 5.69s Backward time: 4.05s Step time: 4.63s\n", - "268720 Examples seen. Accuracy:0.8699 Error: 0.37305 Loss:0.39862 Threads: 8 Forward time: 5.71s Backward time: 4.00s Step time: 4.73s\n", - "269360 Examples seen. Accuracy:0.8716 Error: 0.28204 Loss:0.28513 Threads: 8 Forward time: 5.11s Backward time: 3.56s Step time: 4.07s\n", - "270000 Examples seen. Accuracy:0.8714 Error: 0.32895 Loss:0.29585 Threads: 8 Forward time: 5.14s Backward time: 3.59s Step time: 4.38s\n", - "270640 Examples seen. Accuracy:0.8712 Error: 0.18401 Loss:0.21646 Threads: 8 Forward time: 4.99s Backward time: 3.54s Step time: 4.41s\n", - "271280 Examples seen. Accuracy:0.8693 Error: 0.43247 Loss:0.60597 Threads: 8 Forward time: 4.94s Backward time: 3.56s Step time: 3.79s\n", - "271920 Examples seen. Accuracy:0.8696 Error: 0.43727 Loss:0.66205 Threads: 8 Forward time: 4.94s Backward time: 3.56s Step time: 3.82s\n", - "272560 Examples seen. Accuracy:0.8690 Error: 0.35901 Loss:0.39837 Threads: 8 Forward time: 4.98s Backward time: 3.55s Step time: 3.80s\n", - "273200 Examples seen. Accuracy:0.8719 Error: 0.29738 Loss:0.32262 Threads: 8 Forward time: 4.96s Backward time: 3.55s Step time: 3.79s\n", - "273840 Examples seen. Accuracy:0.8691 Error: 0.33896 Loss:0.36114 Threads: 8 Forward time: 4.97s Backward time: 3.55s Step time: 3.83s\n", - "274480 Examples seen. Accuracy:0.8694 Error: 0.34602 Loss:0.38747 Threads: 8 Forward time: 4.98s Backward time: 3.54s Step time: 3.75s\n", - "275120 Examples seen. 
Accuracy:0.8720 Error: 0.30371 Loss:0.60516 Threads: 8 Forward time: 5.05s Backward time: 3.58s Step time: 3.77s\n", - "275760 Examples seen. Accuracy:0.8720 Error: 0.48180 Loss:0.56105 Threads: 8 Forward time: 4.92s Backward time: 3.54s Step time: 3.79s\n", - "276400 Examples seen. Accuracy:0.8686 Error: 0.42068 Loss:0.51996 Threads: 8 Forward time: 4.97s Backward time: 3.52s Step time: 3.78s\n", - "277040 Examples seen. Accuracy:0.8697 Error: 0.33638 Loss:0.36763 Threads: 8 Forward time: 4.93s Backward time: 3.54s Step time: 3.81s\n", - "277680 Examples seen. Accuracy:0.8679 Error: 0.40034 Loss:0.49190 Threads: 8 Forward time: 5.07s Backward time: 3.62s Step time: 3.81s\n", - "278320 Examples seen. Accuracy:0.8674 Error: 0.32358 Loss:0.38193 Threads: 8 Forward time: 4.97s Backward time: 3.56s Step time: 3.77s\n", - "278960 Examples seen. Accuracy:0.8693 Error: 0.26562 Loss:0.27574 Threads: 8 Forward time: 4.99s Backward time: 3.55s Step time: 3.89s\n", - "279600 Examples seen. Accuracy:0.8698 Error: 0.31484 Loss:0.38780 Threads: 8 Forward time: 4.98s Backward time: 3.55s Step time: 3.76s\n", - "280240 Examples seen. Accuracy:0.8715 Error: 0.30730 Loss:0.36770 Threads: 8 Forward time: 4.99s Backward time: 3.50s Step time: 3.77s\n", - "280880 Examples seen. Accuracy:0.8723 Error: 0.41658 Loss:0.51453 Threads: 8 Forward time: 4.98s Backward time: 3.49s Step time: 3.80s\n", - "281520 Examples seen. Accuracy:0.8715 Error: 0.44212 Loss:0.76795 Threads: 8 Forward time: 5.00s Backward time: 3.52s Step time: 3.86s\n", - "282160 Examples seen. Accuracy:0.8708 Error: 0.42520 Loss:0.37480 Threads: 8 Forward time: 4.95s Backward time: 3.46s Step time: 3.80s\n", - "282800 Examples seen. Accuracy:0.8709 Error: 0.45617 Loss:0.51872 Threads: 8 Forward time: 5.02s Backward time: 3.52s Step time: 3.93s\n", - "283440 Examples seen. Accuracy:0.8713 Error: 0.29606 Loss:0.44740 Threads: 8 Forward time: 4.97s Backward time: 3.49s Step time: 3.81s\n", - "284080 Examples seen. 
Accuracy:0.8748 Error: 0.35755 Loss:0.39252 Threads: 8 Forward time: 4.98s Backward time: 3.51s Step time: 3.75s\n", - "284720 Examples seen. Accuracy:0.8759 Error: 0.38344 Loss:0.34992 Threads: 8 Forward time: 4.93s Backward time: 3.54s Step time: 3.74s\n", - "285360 Examples seen. Accuracy:0.8758 Error: 0.29416 Loss:0.25656 Threads: 8 Forward time: 4.94s Backward time: 3.49s Step time: 3.73s\n", - "286000 Examples seen. Accuracy:0.8765 Error: 0.20789 Loss:0.15329 Threads: 8 Forward time: 4.97s Backward time: 3.51s Step time: 3.73s\n", - "286640 Examples seen. Accuracy:0.8745 Error: 0.37439 Loss:0.31489 Threads: 8 Forward time: 5.00s Backward time: 3.49s Step time: 3.75s\n", - "287280 Examples seen. Accuracy:0.8743 Error: 0.34163 Loss:0.42036 Threads: 8 Forward time: 4.92s Backward time: 3.49s Step time: 3.75s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "287920 Examples seen. Accuracy:0.8740 Error: 0.25792 Loss:0.27702 Threads: 8 Forward time: 4.93s Backward time: 3.47s Step time: 3.73s\n", - "288560 Examples seen. Accuracy:0.8731 Error: 0.36310 Loss:0.48028 Threads: 8 Forward time: 4.90s Backward time: 3.50s Step time: 3.76s\n", - "289200 Examples seen. Accuracy:0.8726 Error: 0.45299 Loss:0.42203 Threads: 8 Forward time: 4.95s Backward time: 3.49s Step time: 3.73s\n", - "289840 Examples seen. Accuracy:0.8694 Error: 0.47282 Loss:0.45889 Threads: 8 Forward time: 4.92s Backward time: 3.50s Step time: 3.72s\n", - "290480 Examples seen. Accuracy:0.8701 Error: 0.30834 Loss:0.31276 Threads: 8 Forward time: 4.93s Backward time: 3.51s Step time: 3.74s\n", - "291120 Examples seen. Accuracy:0.8710 Error: 0.23451 Loss:0.19223 Threads: 8 Forward time: 5.01s Backward time: 3.50s Step time: 3.79s\n", - "291760 Examples seen. Accuracy:0.8687 Error: 0.31808 Loss:0.35315 Threads: 8 Forward time: 5.00s Backward time: 3.50s Step time: 3.79s\n", - "292400 Examples seen. 
Accuracy:0.8686 Error: 0.37695 Loss:0.37042 Threads: 8 Forward time: 4.97s Backward time: 3.51s Step time: 3.76s\n", - "293040 Examples seen. Accuracy:0.8697 Error: 0.25772 Loss:0.22597 Threads: 8 Forward time: 5.02s Backward time: 3.52s Step time: 3.76s\n", - "293680 Examples seen. Accuracy:0.8702 Error: 0.31892 Loss:0.32680 Threads: 8 Forward time: 4.96s Backward time: 3.45s Step time: 3.76s\n", - "294320 Examples seen. Accuracy:0.8702 Error: 0.37245 Loss:0.38415 Threads: 8 Forward time: 4.96s Backward time: 3.48s Step time: 3.75s\n", - "294960 Examples seen. Accuracy:0.8707 Error: 0.24536 Loss:0.32072 Threads: 8 Forward time: 4.97s Backward time: 3.48s Step time: 3.74s\n", - "295600 Examples seen. Accuracy:0.8717 Error: 0.22107 Loss:0.17873 Threads: 8 Forward time: 5.03s Backward time: 3.52s Step time: 3.76s\n", - "296240 Examples seen. Accuracy:0.8715 Error: 0.31787 Loss:0.30973 Threads: 8 Forward time: 5.03s Backward time: 3.52s Step time: 3.82s\n", - "296880 Examples seen. Accuracy:0.8733 Error: 0.25571 Loss:0.25033 Threads: 8 Forward time: 5.10s Backward time: 3.50s Step time: 3.77s\n", - "297520 Examples seen. Accuracy:0.8724 Error: 0.43821 Loss:0.61105 Threads: 8 Forward time: 5.03s Backward time: 3.51s Step time: 3.79s\n", - "298160 Examples seen. Accuracy:0.8717 Error: 0.27836 Loss:0.20738 Threads: 8 Forward time: 5.05s Backward time: 3.48s Step time: 3.78s\n", - "298800 Examples seen. Accuracy:0.8724 Error: 0.33369 Loss:0.35276 Threads: 8 Forward time: 5.06s Backward time: 3.46s Step time: 3.77s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 6 Examples seen:299424 Validation Accuracy: 0.8750 Validation Error: 0.4079 Validation Loss: 0.4092 Total time: 34.99min\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.2 hours.\n", - "Epochs: 6. Working time: 0.58 hours.\n", - "300064 Examples seen. 
Accuracy:0.8730 Error: 0.44049 Loss:0.41471 Threads: 8 Forward time: 4.98s Backward time: 3.48s Step time: 3.91s\n", - "300704 Examples seen. Accuracy:0.8741 Error: 0.31149 Loss:0.36726 Threads: 8 Forward time: 4.95s Backward time: 3.46s Step time: 3.85s\n", - "301344 Examples seen. Accuracy:0.8768 Error: 0.39692 Loss:0.37729 Threads: 8 Forward time: 4.95s Backward time: 3.46s Step time: 3.84s\n", - "301984 Examples seen. Accuracy:0.8769 Error: 0.37734 Loss:0.37744 Threads: 8 Forward time: 4.94s Backward time: 3.48s Step time: 3.85s\n", - "302624 Examples seen. Accuracy:0.8744 Error: 0.33676 Loss:0.39071 Threads: 8 Forward time: 4.97s Backward time: 3.49s Step time: 3.88s\n", - "303264 Examples seen. Accuracy:0.8744 Error: 0.26232 Loss:0.31777 Threads: 8 Forward time: 4.96s Backward time: 3.49s Step time: 3.82s\n", - "303904 Examples seen. Accuracy:0.8756 Error: 0.17301 Loss:0.13140 Threads: 8 Forward time: 4.92s Backward time: 3.48s Step time: 3.75s\n", - "304544 Examples seen. Accuracy:0.8762 Error: 0.28372 Loss:0.30428 Threads: 8 Forward time: 4.95s Backward time: 3.52s Step time: 3.76s\n", - "305184 Examples seen. Accuracy:0.8781 Error: 0.31444 Loss:0.31657 Threads: 8 Forward time: 5.71s Backward time: 4.01s Step time: 4.24s\n", - "305824 Examples seen. Accuracy:0.8772 Error: 0.34488 Loss:0.36012 Threads: 8 Forward time: 4.93s Backward time: 3.51s Step time: 4.27s\n", - "306464 Examples seen. Accuracy:0.8762 Error: 0.34542 Loss:0.34030 Threads: 8 Forward time: 4.96s Backward time: 3.52s Step time: 3.77s\n", - "307104 Examples seen. Accuracy:0.8771 Error: 0.27357 Loss:0.25984 Threads: 8 Forward time: 4.99s Backward time: 3.49s Step time: 3.76s\n", - "307744 Examples seen. Accuracy:0.8772 Error: 0.28256 Loss:0.24896 Threads: 8 Forward time: 4.93s Backward time: 3.51s Step time: 3.75s\n", - "308384 Examples seen. Accuracy:0.8770 Error: 0.27555 Loss:0.32732 Threads: 8 Forward time: 4.93s Backward time: 3.47s Step time: 3.75s\n", - "309024 Examples seen. 
Accuracy:0.8782 Error: 0.26220 Loss:0.22220 Threads: 8 Forward time: 4.96s Backward time: 3.52s Step time: 3.77s\n", - "309664 Examples seen. Accuracy:0.8792 Error: 0.33832 Loss:0.34935 Threads: 8 Forward time: 4.99s Backward time: 3.49s Step time: 3.77s\n", - "310304 Examples seen. Accuracy:0.8800 Error: 0.19303 Loss:0.20110 Threads: 8 Forward time: 4.93s Backward time: 3.50s Step time: 3.75s\n", - "310944 Examples seen. Accuracy:0.8781 Error: 0.38871 Loss:0.46341 Threads: 8 Forward time: 4.95s Backward time: 3.53s Step time: 3.75s\n", - "311584 Examples seen. Accuracy:0.8768 Error: 0.37301 Loss:0.46918 Threads: 8 Forward time: 4.95s Backward time: 3.51s Step time: 3.76s\n", - "312224 Examples seen. Accuracy:0.8776 Error: 0.41793 Loss:0.55892 Threads: 8 Forward time: 4.92s Backward time: 3.49s Step time: 3.74s\n", - "312864 Examples seen. Accuracy:0.8786 Error: 0.36048 Loss:0.38111 Threads: 8 Forward time: 4.95s Backward time: 3.50s Step time: 3.75s\n", - "313504 Examples seen. Accuracy:0.8785 Error: 0.26750 Loss:0.34519 Threads: 8 Forward time: 4.97s Backward time: 3.48s Step time: 3.83s\n", - "314144 Examples seen. Accuracy:0.8767 Error: 0.30653 Loss:0.36306 Threads: 8 Forward time: 4.99s Backward time: 3.48s Step time: 3.79s\n", - "314784 Examples seen. Accuracy:0.8768 Error: 0.42513 Loss:0.39690 Threads: 8 Forward time: 5.04s Backward time: 3.47s Step time: 3.79s\n", - "315424 Examples seen. Accuracy:0.8790 Error: 0.30792 Loss:0.29558 Threads: 8 Forward time: 5.06s Backward time: 3.53s Step time: 3.82s\n", - "316064 Examples seen. Accuracy:0.8795 Error: 0.44585 Loss:0.50948 Threads: 8 Forward time: 4.99s Backward time: 3.52s Step time: 3.80s\n", - "316704 Examples seen. Accuracy:0.8807 Error: 0.23449 Loss:0.23607 Threads: 8 Forward time: 4.98s Backward time: 3.51s Step time: 3.82s\n", - "317344 Examples seen. Accuracy:0.8812 Error: 0.30097 Loss:0.28438 Threads: 8 Forward time: 4.99s Backward time: 3.48s Step time: 3.78s\n", - "317984 Examples seen. 
Accuracy:0.8814 Error: 0.29702 Loss:0.28903 Threads: 8 Forward time: 4.98s Backward time: 3.50s Step time: 3.80s\n", - "318624 Examples seen. Accuracy:0.8821 Error: 0.33107 Loss:0.27959 Threads: 8 Forward time: 4.94s Backward time: 3.47s Step time: 3.79s\n", - "319264 Examples seen. Accuracy:0.8834 Error: 0.38918 Loss:0.40117 Threads: 8 Forward time: 4.98s Backward time: 3.49s Step time: 3.75s\n", - "319904 Examples seen. Accuracy:0.8842 Error: 0.27450 Loss:0.28561 Threads: 8 Forward time: 4.96s Backward time: 3.49s Step time: 3.79s\n", - "320544 Examples seen. Accuracy:0.8824 Error: 0.35142 Loss:0.32862 Threads: 8 Forward time: 4.97s Backward time: 3.50s Step time: 3.79s\n", - "321184 Examples seen. Accuracy:0.8837 Error: 0.24816 Loss:0.20022 Threads: 8 Forward time: 5.02s Backward time: 3.46s Step time: 3.79s\n", - "321824 Examples seen. Accuracy:0.8837 Error: 0.37376 Loss:0.45675 Threads: 8 Forward time: 5.00s Backward time: 3.49s Step time: 3.79s\n", - "322464 Examples seen. Accuracy:0.8827 Error: 0.26662 Loss:0.26174 Threads: 8 Forward time: 5.01s Backward time: 3.48s Step time: 3.78s\n", - "323104 Examples seen. Accuracy:0.8848 Error: 0.21413 Loss:0.18100 Threads: 8 Forward time: 8.51s Backward time: 5.97s Step time: 4.40s\n", - "323744 Examples seen. Accuracy:0.8831 Error: 0.19351 Loss:0.13206 Threads: 8 Forward time: 4.94s Backward time: 3.49s Step time: 3.82s\n", - "324384 Examples seen. Accuracy:0.8825 Error: 0.31639 Loss:0.28280 Threads: 8 Forward time: 4.93s Backward time: 3.51s Step time: 3.77s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "325024 Examples seen. Accuracy:0.8833 Error: 0.32511 Loss:0.36948 Threads: 8 Forward time: 4.96s Backward time: 3.46s Step time: 3.79s\n", - "325664 Examples seen. Accuracy:0.8840 Error: 0.23351 Loss:0.25399 Threads: 8 Forward time: 4.92s Backward time: 3.48s Step time: 3.78s\n", - "326304 Examples seen. 
Accuracy:0.8851 Error: 0.26503 Loss:0.25498 Threads: 8 Forward time: 4.97s Backward time: 3.51s Step time: 3.77s\n", - "326944 Examples seen. Accuracy:0.8868 Error: 0.31091 Loss:0.31941 Threads: 8 Forward time: 4.97s Backward time: 3.54s Step time: 3.80s\n", - "327584 Examples seen. Accuracy:0.8863 Error: 0.42101 Loss:0.60087 Threads: 8 Forward time: 4.98s Backward time: 3.49s Step time: 3.79s\n", - "328224 Examples seen. Accuracy:0.8879 Error: 0.34348 Loss:0.28510 Threads: 8 Forward time: 4.97s Backward time: 3.50s Step time: 3.78s\n", - "328864 Examples seen. Accuracy:0.8876 Error: 0.33577 Loss:0.32055 Threads: 8 Forward time: 4.94s Backward time: 3.49s Step time: 3.78s\n", - "329504 Examples seen. Accuracy:0.8873 Error: 0.32396 Loss:0.34236 Threads: 8 Forward time: 4.93s Backward time: 3.51s Step time: 3.77s\n", - "330144 Examples seen. Accuracy:0.8874 Error: 0.42834 Loss:0.45800 Threads: 8 Forward time: 4.97s Backward time: 3.49s Step time: 3.78s\n", - "330784 Examples seen. Accuracy:0.8873 Error: 0.37089 Loss:0.38457 Threads: 8 Forward time: 4.99s Backward time: 3.47s Step time: 3.76s\n", - "331424 Examples seen. Accuracy:0.8857 Error: 0.38040 Loss:0.33412 Threads: 8 Forward time: 4.98s Backward time: 3.49s Step time: 3.78s\n", - "332064 Examples seen. Accuracy:0.8850 Error: 0.26981 Loss:0.24799 Threads: 8 Forward time: 4.96s Backward time: 3.49s Step time: 3.80s\n", - "332704 Examples seen. Accuracy:0.8858 Error: 0.22499 Loss:0.21251 Threads: 8 Forward time: 4.95s Backward time: 3.53s Step time: 3.80s\n", - "333344 Examples seen. Accuracy:0.8868 Error: 0.25093 Loss:0.27546 Threads: 8 Forward time: 5.08s Backward time: 3.61s Step time: 3.86s\n", - "333984 Examples seen. Accuracy:0.8864 Error: 0.40687 Loss:0.42739 Threads: 8 Forward time: 4.96s Backward time: 3.52s Step time: 3.82s\n", - "334624 Examples seen. Accuracy:0.8855 Error: 0.25305 Loss:0.40589 Threads: 8 Forward time: 4.93s Backward time: 3.46s Step time: 3.78s\n", - "335264 Examples seen. 
Accuracy:0.8855 Error: 0.23767 Loss:0.21024 Threads: 8 Forward time: 4.99s Backward time: 3.49s Step time: 3.76s\n", - "335904 Examples seen. Accuracy:0.8877 Error: 0.33541 Loss:0.31152 Threads: 8 Forward time: 4.96s Backward time: 3.47s Step time: 3.76s\n", - "336544 Examples seen. Accuracy:0.8883 Error: 0.28533 Loss:0.27548 Threads: 8 Forward time: 4.98s Backward time: 3.50s Step time: 3.77s\n", - "337184 Examples seen. Accuracy:0.8829 Error: 0.55519 Loss:0.72949 Threads: 8 Forward time: 4.97s Backward time: 3.51s Step time: 3.81s\n", - "337824 Examples seen. Accuracy:0.8799 Error: 0.40741 Loss:0.44673 Threads: 8 Forward time: 4.93s Backward time: 3.48s Step time: 3.76s\n", - "338464 Examples seen. Accuracy:0.8811 Error: 0.29507 Loss:0.34375 Threads: 8 Forward time: 4.99s Backward time: 3.51s Step time: 4.02s\n", - "339104 Examples seen. Accuracy:0.8836 Error: 0.30081 Loss:0.21952 Threads: 8 Forward time: 5.01s Backward time: 3.45s Step time: 3.87s\n", - "339744 Examples seen. Accuracy:0.8833 Error: 0.34809 Loss:0.30790 Threads: 8 Forward time: 4.91s Backward time: 3.49s Step time: 3.79s\n", - "340384 Examples seen. Accuracy:0.8839 Error: 0.31525 Loss:0.28637 Threads: 8 Forward time: 4.92s Backward time: 3.52s Step time: 3.79s\n", - "341024 Examples seen. Accuracy:0.8822 Error: 0.29187 Loss:0.27903 Threads: 8 Forward time: 4.97s Backward time: 3.50s Step time: 3.81s\n", - "341664 Examples seen. Accuracy:0.8819 Error: 0.28927 Loss:0.34736 Threads: 8 Forward time: 5.00s Backward time: 3.48s Step time: 3.81s\n", - "342304 Examples seen. Accuracy:0.8818 Error: 0.23394 Loss:0.24446 Threads: 8 Forward time: 5.03s Backward time: 3.54s Step time: 3.80s\n", - "342944 Examples seen. Accuracy:0.8816 Error: 0.30263 Loss:0.36698 Threads: 8 Forward time: 4.94s Backward time: 3.50s Step time: 3.79s\n", - "343584 Examples seen. Accuracy:0.8820 Error: 0.20980 Loss:0.20819 Threads: 8 Forward time: 4.94s Backward time: 3.49s Step time: 3.79s\n", - "344224 Examples seen. 
Accuracy:0.8826 Error: 0.28569 Loss:0.24812 Threads: 8 Forward time: 4.98s Backward time: 3.52s Step time: 3.79s\n", - "344864 Examples seen. Accuracy:0.8815 Error: 0.28951 Loss:0.29310 Threads: 8 Forward time: 5.04s Backward time: 3.48s Step time: 3.82s\n", - "345504 Examples seen. Accuracy:0.8808 Error: 0.41354 Loss:0.40160 Threads: 8 Forward time: 4.97s Backward time: 3.46s Step time: 3.81s\n", - "346144 Examples seen. Accuracy:0.8791 Error: 0.37657 Loss:0.36933 Threads: 8 Forward time: 4.97s Backward time: 3.49s Step time: 3.79s\n", - "346784 Examples seen. Accuracy:0.8766 Error: 0.29656 Loss:0.29073 Threads: 8 Forward time: 4.94s Backward time: 3.49s Step time: 3.79s\n", - "347424 Examples seen. Accuracy:0.8769 Error: 0.22626 Loss:0.19025 Threads: 8 Forward time: 4.97s Backward time: 3.45s Step time: 3.79s\n", - "348064 Examples seen. Accuracy:0.8776 Error: 0.27147 Loss:0.29989 Threads: 8 Forward time: 4.96s Backward time: 3.47s Step time: 3.79s\n", - "348704 Examples seen. Accuracy:0.8778 Error: 0.34234 Loss:0.36908 Threads: 8 Forward time: 4.95s Backward time: 3.49s Step time: 3.78s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 7 Examples seen:349328 Validation Accuracy: 0.8917 Validation Error: 0.3574 Validation Loss: 0.3466 Total time: 40.44min\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.2 hours.\n", - "Epochs: 7. Working time: 0.67 hours.\n", - "349968 Examples seen. Accuracy:0.8768 Error: 0.36863 Loss:0.35538 Threads: 8 Forward time: 4.95s Backward time: 3.50s Step time: 3.76s\n", - "350608 Examples seen. Accuracy:0.8772 Error: 0.34856 Loss:0.43196 Threads: 8 Forward time: 5.03s Backward time: 3.54s Step time: 3.77s\n", - "351248 Examples seen. Accuracy:0.8779 Error: 0.43606 Loss:0.58930 Threads: 8 Forward time: 5.02s Backward time: 3.49s Step time: 3.78s\n", - "351888 Examples seen. 
Accuracy:0.8800 Error: 0.45147 Loss:0.49937 Threads: 8 Forward time: 4.95s Backward time: 3.48s Step time: 3.77s\n", - "352528 Examples seen. Accuracy:0.8787 Error: 0.29451 Loss:0.27244 Threads: 8 Forward time: 4.98s Backward time: 3.46s Step time: 3.78s\n", - "353168 Examples seen. Accuracy:0.8802 Error: 0.16149 Loss:0.14533 Threads: 8 Forward time: 4.99s Backward time: 3.50s Step time: 3.79s\n", - "353808 Examples seen. Accuracy:0.8837 Error: 0.20755 Loss:0.26077 Threads: 8 Forward time: 4.99s Backward time: 3.49s Step time: 3.79s\n", - "354448 Examples seen. Accuracy:0.8835 Error: 0.20552 Loss:0.14693 Threads: 8 Forward time: 4.93s Backward time: 3.48s Step time: 3.78s\n", - "355088 Examples seen. Accuracy:0.8808 Error: 0.28900 Loss:0.27395 Threads: 8 Forward time: 4.97s Backward time: 3.49s Step time: 3.78s\n", - "355728 Examples seen. Accuracy:0.8814 Error: 0.26270 Loss:0.21000 Threads: 8 Forward time: 4.97s Backward time: 3.47s Step time: 3.79s\n", - "356368 Examples seen. Accuracy:0.8814 Error: 0.32122 Loss:0.31663 Threads: 8 Forward time: 4.91s Backward time: 3.47s Step time: 3.77s\n", - "357008 Examples seen. Accuracy:0.8820 Error: 0.25219 Loss:0.30103 Threads: 8 Forward time: 4.94s Backward time: 3.48s Step time: 3.75s\n", - "357648 Examples seen. Accuracy:0.8839 Error: 0.23050 Loss:0.25109 Threads: 8 Forward time: 4.96s Backward time: 3.52s Step time: 3.76s\n", - "358288 Examples seen. Accuracy:0.8843 Error: 0.19620 Loss:0.16388 Threads: 8 Forward time: 4.94s Backward time: 3.48s Step time: 3.76s\n", - "358928 Examples seen. Accuracy:0.8841 Error: 0.29312 Loss:0.29182 Threads: 8 Forward time: 4.95s Backward time: 3.50s Step time: 3.75s\n", - "359568 Examples seen. Accuracy:0.8838 Error: 0.21842 Loss:0.15144 Threads: 8 Forward time: 4.92s Backward time: 3.45s Step time: 3.75s\n", - "360208 Examples seen. Accuracy:0.8859 Error: 0.32965 Loss:0.30473 Threads: 8 Forward time: 4.97s Backward time: 3.50s Step time: 3.73s\n", - "360848 Examples seen. 
Accuracy:0.8880 Error: 0.18466 Loss:0.13168 Threads: 8 Forward time: 5.00s Backward time: 3.50s Step time: 3.88s\n", - "361488 Examples seen. Accuracy:0.8884 Error: 0.26196 Loss:0.22501 Threads: 8 Forward time: 4.99s Backward time: 3.49s Step time: 3.79s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "362128 Examples seen. Accuracy:0.8895 Error: 0.20811 Loss:0.25456 Threads: 8 Forward time: 4.92s Backward time: 3.48s Step time: 3.83s\n", - "362768 Examples seen. Accuracy:0.8919 Error: 0.10902 Loss:0.09427 Threads: 8 Forward time: 4.97s Backward time: 3.52s Step time: 3.75s\n", - "363408 Examples seen. Accuracy:0.8940 Error: 0.25671 Loss:0.21309 Threads: 8 Forward time: 4.93s Backward time: 3.46s Step time: 3.75s\n", - "364048 Examples seen. Accuracy:0.8925 Error: 0.38461 Loss:0.48285 Threads: 8 Forward time: 5.00s Backward time: 3.50s Step time: 3.75s\n", - "364688 Examples seen. Accuracy:0.8890 Error: 0.29568 Loss:0.42930 Threads: 8 Forward time: 4.94s Backward time: 3.48s Step time: 3.73s\n", - "365328 Examples seen. Accuracy:0.8895 Error: 0.21174 Loss:0.18697 Threads: 8 Forward time: 4.94s Backward time: 3.50s Step time: 3.71s\n", - "365968 Examples seen. Accuracy:0.8891 Error: 0.36499 Loss:0.52891 Threads: 8 Forward time: 4.93s Backward time: 3.48s Step time: 3.71s\n", - "366608 Examples seen. Accuracy:0.8908 Error: 0.25228 Loss:0.27931 Threads: 8 Forward time: 4.93s Backward time: 3.48s Step time: 3.71s\n", - "367248 Examples seen. Accuracy:0.8905 Error: 0.29385 Loss:0.28685 Threads: 8 Forward time: 4.95s Backward time: 3.47s Step time: 3.71s\n", - "367888 Examples seen. Accuracy:0.8910 Error: 0.32724 Loss:0.34549 Threads: 8 Forward time: 4.94s Backward time: 3.47s Step time: 3.72s\n", - "368528 Examples seen. Accuracy:0.8916 Error: 0.25854 Loss:0.32644 Threads: 8 Forward time: 5.06s Backward time: 3.53s Step time: 3.78s\n", - "369168 Examples seen. 
Accuracy:0.8927 Error: 0.16793 Loss:0.11426 Threads: 8 Forward time: 5.05s Backward time: 3.51s Step time: 3.82s\n", - "369808 Examples seen. Accuracy:0.8944 Error: 0.24239 Loss:0.18175 Threads: 8 Forward time: 4.97s Backward time: 3.52s Step time: 3.85s\n", - "370448 Examples seen. Accuracy:0.8929 Error: 0.31733 Loss:0.40820 Threads: 8 Forward time: 4.95s Backward time: 3.51s Step time: 3.80s\n", - "371088 Examples seen. Accuracy:0.8932 Error: 0.30357 Loss:0.25450 Threads: 8 Forward time: 4.96s Backward time: 3.47s Step time: 3.79s\n", - "371728 Examples seen. Accuracy:0.8934 Error: 0.21168 Loss:0.21406 Threads: 8 Forward time: 4.95s Backward time: 3.49s Step time: 3.85s\n", - "372368 Examples seen. Accuracy:0.8910 Error: 0.35418 Loss:0.44411 Threads: 8 Forward time: 4.99s Backward time: 3.48s Step time: 3.78s\n", - "373008 Examples seen. Accuracy:0.8920 Error: 0.36992 Loss:0.37239 Threads: 8 Forward time: 4.99s Backward time: 3.47s Step time: 3.74s\n", - "373648 Examples seen. Accuracy:0.8896 Error: 0.30363 Loss:0.29197 Threads: 8 Forward time: 4.97s Backward time: 3.46s Step time: 3.75s\n", - "374288 Examples seen. Accuracy:0.8892 Error: 0.18495 Loss:0.15381 Threads: 8 Forward time: 5.04s Backward time: 3.51s Step time: 3.80s\n", - "374928 Examples seen. Accuracy:0.8876 Error: 0.50349 Loss:0.63044 Threads: 8 Forward time: 5.02s Backward time: 3.46s Step time: 3.78s\n", - "375568 Examples seen. Accuracy:0.8877 Error: 0.28011 Loss:0.36740 Threads: 8 Forward time: 4.98s Backward time: 3.43s Step time: 3.77s\n", - "376208 Examples seen. Accuracy:0.8877 Error: 0.28996 Loss:0.28570 Threads: 8 Forward time: 5.08s Backward time: 3.46s Step time: 3.78s\n", - "376848 Examples seen. Accuracy:0.8889 Error: 0.42555 Loss:0.44836 Threads: 8 Forward time: 5.01s Backward time: 3.43s Step time: 4.22s\n", - "377488 Examples seen. Accuracy:0.8912 Error: 0.19588 Loss:0.16835 Threads: 8 Forward time: 4.91s Backward time: 3.47s Step time: 3.71s\n", - "378128 Examples seen. 
Accuracy:0.8921 Error: 0.25972 Loss:0.27612 Threads: 8 Forward time: 4.93s Backward time: 3.46s Step time: 3.74s\n", - "378768 Examples seen. Accuracy:0.8925 Error: 0.24128 Loss:0.20132 Threads: 8 Forward time: 4.95s Backward time: 3.47s Step time: 3.72s\n", - "379408 Examples seen. Accuracy:0.8941 Error: 0.28113 Loss:0.22375 Threads: 8 Forward time: 4.91s Backward time: 3.46s Step time: 3.75s\n", - "380048 Examples seen. Accuracy:0.8960 Error: 0.18650 Loss:0.13804 Threads: 8 Forward time: 4.93s Backward time: 3.44s Step time: 3.73s\n", - "380688 Examples seen. Accuracy:0.8948 Error: 0.31853 Loss:0.75931 Threads: 8 Forward time: 4.90s Backward time: 3.42s Step time: 3.72s\n", - "381328 Examples seen. Accuracy:0.8962 Error: 0.24520 Loss:0.22383 Threads: 8 Forward time: 4.97s Backward time: 3.45s Step time: 3.73s\n", - "381968 Examples seen. Accuracy:0.8979 Error: 0.28176 Loss:0.27597 Threads: 8 Forward time: 4.93s Backward time: 3.45s Step time: 3.73s\n", - "382608 Examples seen. Accuracy:0.8947 Error: 0.41118 Loss:0.44400 Threads: 8 Forward time: 4.91s Backward time: 3.50s Step time: 3.74s\n", - "383248 Examples seen. Accuracy:0.8955 Error: 0.28678 Loss:0.27215 Threads: 8 Forward time: 4.90s Backward time: 3.40s Step time: 3.73s\n", - "383888 Examples seen. Accuracy:0.8973 Error: 0.36359 Loss:0.44842 Threads: 8 Forward time: 4.93s Backward time: 3.46s Step time: 3.72s\n", - "384528 Examples seen. Accuracy:0.8990 Error: 0.24468 Loss:0.34310 Threads: 8 Forward time: 4.92s Backward time: 3.42s Step time: 3.73s\n", - "385168 Examples seen. Accuracy:0.8995 Error: 0.10447 Loss:0.07142 Threads: 8 Forward time: 4.90s Backward time: 3.46s Step time: 3.71s\n", - "385808 Examples seen. Accuracy:0.8988 Error: 0.24809 Loss:0.31383 Threads: 8 Forward time: 4.92s Backward time: 3.46s Step time: 3.72s\n", - "386448 Examples seen. Accuracy:0.8986 Error: 0.21520 Loss:0.16863 Threads: 8 Forward time: 4.92s Backward time: 3.45s Step time: 3.71s\n", - "387088 Examples seen. 
Accuracy:0.8983 Error: 0.13757 Loss:0.09858 Threads: 8 Forward time: 4.89s Backward time: 3.44s Step time: 3.71s\n", - "387728 Examples seen. Accuracy:0.8975 Error: 0.32179 Loss:0.31074 Threads: 8 Forward time: 4.95s Backward time: 3.44s Step time: 3.81s\n", - "388368 Examples seen. Accuracy:0.8976 Error: 0.19826 Loss:0.21246 Threads: 8 Forward time: 5.00s Backward time: 3.48s Step time: 3.76s\n", - "389008 Examples seen. Accuracy:0.8968 Error: 0.34311 Loss:0.30443 Threads: 8 Forward time: 4.90s Backward time: 3.45s Step time: 3.73s\n", - "389648 Examples seen. Accuracy:0.8958 Error: 0.30185 Loss:0.28582 Threads: 8 Forward time: 5.11s Backward time: 3.48s Step time: 3.77s\n", - "390288 Examples seen. Accuracy:0.8972 Error: 0.35667 Loss:0.33354 Threads: 8 Forward time: 4.95s Backward time: 3.42s Step time: 3.74s\n", - "390928 Examples seen. Accuracy:0.8974 Error: 0.37074 Loss:0.46821 Threads: 8 Forward time: 4.93s Backward time: 3.46s Step time: 3.73s\n", - "391568 Examples seen. Accuracy:0.8991 Error: 0.29196 Loss:0.26430 Threads: 8 Forward time: 4.98s Backward time: 3.52s Step time: 3.72s\n", - "392208 Examples seen. Accuracy:0.9002 Error: 0.24413 Loss:0.40399 Threads: 8 Forward time: 4.90s Backward time: 3.46s Step time: 3.72s\n", - "392848 Examples seen. Accuracy:0.8976 Error: 0.27860 Loss:0.28463 Threads: 8 Forward time: 4.94s Backward time: 3.47s Step time: 3.73s\n", - "393488 Examples seen. Accuracy:0.8976 Error: 0.25641 Loss:0.25788 Threads: 8 Forward time: 4.91s Backward time: 3.44s Step time: 3.71s\n", - "394128 Examples seen. Accuracy:0.8978 Error: 0.25440 Loss:0.22948 Threads: 8 Forward time: 4.92s Backward time: 3.44s Step time: 3.72s\n", - "394768 Examples seen. Accuracy:0.8961 Error: 0.16535 Loss:0.13436 Threads: 8 Forward time: 4.92s Backward time: 3.42s Step time: 3.72s\n", - "395408 Examples seen. Accuracy:0.8959 Error: 0.30948 Loss:0.34091 Threads: 8 Forward time: 4.93s Backward time: 3.45s Step time: 3.73s\n", - "396048 Examples seen. 
Accuracy:0.8958 Error: 0.09142 Loss:0.05720 Threads: 8 Forward time: 4.92s Backward time: 3.44s Step time: 3.72s\n", - "396688 Examples seen. Accuracy:0.8944 Error: 0.30728 Loss:0.35313 Threads: 8 Forward time: 4.93s Backward time: 3.41s Step time: 3.71s\n", - "397328 Examples seen. Accuracy:0.8956 Error: 0.37548 Loss:0.54506 Threads: 8 Forward time: 4.95s Backward time: 3.45s Step time: 3.71s\n", - "397968 Examples seen. Accuracy:0.8949 Error: 0.21717 Loss:0.16693 Threads: 8 Forward time: 5.00s Backward time: 3.43s Step time: 3.73s\n", - "398608 Examples seen. Accuracy:0.8962 Error: 0.20949 Loss:0.20576 Threads: 8 Forward time: 5.06s Backward time: 3.45s Step time: 3.77s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epochs: 8 Examples seen:399232 Validation Accuracy: 0.9105 Validation Error: 0.3023 Validation Loss: 0.2852 Total time: 45.80min\n", - "Layer 0 Max Output: 1.266 Min Output: -2.000 TNNetInput 128,128,3 Times: 0.00s 0.00s\n", - "Layer 1 Neurons: 64 Max Weight: 0.265 Min Weight: -0.262 Max Output: 3.990 Min Output: -4.138 TNNetConvolutionLinear 66,66,64 Times: 8.53s 0.40s Parent:0\n", - "Layer 2 Max Output: 3.990 Min Output: -2.102 TNNetMaxPool 33,33,64 Times: 3.61s 0.06s Parent:1\n", - "Layer 3 Neurons: 1 Max Weight: 0.620 Min Weight: 0.282 Max Output: 5.977 Min Output: -3.842 TNNetMovingStdNormalization 33,33,64 Times: 0.28s 0.00s Parent:2\n", - "Layer 4 Neurons: 64 Max Weight: 0.147 Min Weight: -0.143 Max Output: 6.271 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.85s 0.51s Parent:3\n", - "Layer 5 Neurons: 64 Max Weight: 0.246 Min Weight: -0.229 Max Output: 10.426 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.86s 0.18s Parent:4\n", - "Layer 6 Max Output: 10.426 Min Output: 0.000 TNNetMaxPool 17,17,64 Times: 0.51s 0.02s Parent:5\n", - "Layer 7 Neurons: 64 Max Weight: 0.156 Min Weight: -0.170 Max Output: 
4.913 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.47s 0.09s Parent:6\n", - "Layer 8 Neurons: 64 Max Weight: 0.163 Min Weight: -0.145 Max Output: 3.961 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.46s 0.09s Parent:7\n", - "Layer 9 Neurons: 64 Max Weight: 0.168 Min Weight: -0.156 Max Output: 5.379 Min Output: 0.000 TNNetConvolutionReLU 9,9,64 Times: 0.42s 0.02s Parent:8\n", - "Layer 10 Max Output: 5.379 Min Output: 0.000 TNNetDropout 9,9,64 Times: 0.01s 0.00s Parent:9\n", - "Layer 11 Max Output: 5.379 Min Output: 0.000 TNNetMaxPool 5,5,64 Times: 0.05s 0.00s Parent:10\n", - "Layer 12 Neurons: 39 Max Weight: 0.291 Min Weight: -0.236 Max Output: 11.969 Min Output: -7.291 TNNetFullConnectLinear 39,1,1 Times: 0.04s 0.00s Parent:11\n", - "Layer 13 Max Output: 0.978 Min Output: 0.000 TNNetSoftMax 39,1,1 Times: 0.00s 0.00s Parent:12\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.2 hours.\n", - "Epochs: 8. Working time: 0.76 hours.\n", - "399872 Examples seen. Accuracy:0.8994 Error: 0.21442 Loss:0.16581 Threads: 8 Forward time: 4.99s Backward time: 3.44s Step time: 3.75s\n", - "400512 Examples seen. Accuracy:0.9003 Error: 0.27726 Loss:0.27547 Threads: 8 Forward time: 4.98s Backward time: 3.43s Step time: 3.76s\n", - "401152 Examples seen. Accuracy:0.8994 Error: 0.31509 Loss:0.43434 Threads: 8 Forward time: 5.04s Backward time: 3.50s Step time: 3.76s\n", - "401792 Examples seen. Accuracy:0.8993 Error: 0.33578 Loss:0.28914 Threads: 8 Forward time: 4.99s Backward time: 3.42s Step time: 4.39s\n", - "402432 Examples seen. Accuracy:0.9006 Error: 0.16803 Loss:0.12056 Threads: 8 Forward time: 4.98s Backward time: 3.46s Step time: 3.72s\n", - "403072 Examples seen. Accuracy:0.9020 Error: 0.15076 Loss:0.11473 Threads: 8 Forward time: 4.94s Backward time: 3.45s Step time: 3.78s\n", - "403712 Examples seen. Accuracy:0.9025 Error: 0.26013 Loss:0.26911 Threads: 8 Forward time: 4.94s Backward time: 3.42s Step time: 3.73s\n", - "404352 Examples seen. 
Accuracy:0.9032 Error: 0.19309 Loss:0.15420 Threads: 8 Forward time: 4.96s Backward time: 3.40s Step time: 3.78s\n", - "404992 Examples seen. Accuracy:0.9044 Error: 0.16549 Loss:0.19730 Threads: 8 Forward time: 4.93s Backward time: 3.43s Step time: 3.72s\n", - "405632 Examples seen. Accuracy:0.9038 Error: 0.24984 Loss:0.28750 Threads: 8 Forward time: 5.03s Backward time: 3.48s Step time: 3.83s\n", - "406272 Examples seen. Accuracy:0.9033 Error: 0.34040 Loss:0.44361 Threads: 8 Forward time: 4.91s Backward time: 3.44s Step time: 3.75s\n", - "406912 Examples seen. Accuracy:0.9013 Error: 0.27724 Loss:0.32900 Threads: 8 Forward time: 5.00s Backward time: 3.43s Step time: 3.73s\n", - "407552 Examples seen. Accuracy:0.9017 Error: 0.29230 Loss:0.34874 Threads: 8 Forward time: 4.94s Backward time: 3.45s Step time: 3.74s\n", - "408192 Examples seen. Accuracy:0.9002 Error: 0.37263 Loss:0.48775 Threads: 8 Forward time: 4.94s Backward time: 3.45s Step time: 3.79s\n", - "408832 Examples seen. Accuracy:0.9001 Error: 0.34247 Loss:0.35019 Threads: 8 Forward time: 4.96s Backward time: 3.45s Step time: 3.73s\n", - "409472 Examples seen. Accuracy:0.8991 Error: 0.26548 Loss:0.26771 Threads: 8 Forward time: 4.99s Backward time: 3.44s Step time: 3.86s\n", - "410112 Examples seen. Accuracy:0.8981 Error: 0.27243 Loss:0.29595 Threads: 8 Forward time: 4.97s Backward time: 3.47s Step time: 3.74s\n", - "410752 Examples seen. Accuracy:0.8965 Error: 0.26792 Loss:0.21175 Threads: 8 Forward time: 4.93s Backward time: 3.43s Step time: 3.76s\n", - "411392 Examples seen. Accuracy:0.8994 Error: 0.13233 Loss:0.08746 Threads: 8 Forward time: 4.97s Backward time: 3.44s Step time: 3.73s\n", - "412032 Examples seen. Accuracy:0.8994 Error: 0.24668 Loss:0.29498 Threads: 8 Forward time: 4.95s Backward time: 3.43s Step time: 3.81s\n", - "412672 Examples seen. Accuracy:0.8997 Error: 0.40232 Loss:0.56119 Threads: 8 Forward time: 4.92s Backward time: 3.44s Step time: 3.74s\n", - "413312 Examples seen. 
Accuracy:0.8988 Error: 0.36374 Loss:0.50164 Threads: 8 Forward time: 5.05s Backward time: 3.45s Step time: 3.85s\n", - "413952 Examples seen. Accuracy:0.8990 Error: 0.27126 Loss:0.31077 Threads: 8 Forward time: 4.99s Backward time: 3.44s Step time: 3.83s\n", - "414592 Examples seen. Accuracy:0.8972 Error: 0.24241 Loss:0.22294 Threads: 8 Forward time: 4.99s Backward time: 3.43s Step time: 3.80s\n", - "415232 Examples seen. Accuracy:0.8982 Error: 0.25018 Loss:0.23907 Threads: 8 Forward time: 4.95s Backward time: 3.44s Step time: 3.82s\n", - "415872 Examples seen. Accuracy:0.8977 Error: 0.42614 Loss:0.54798 Threads: 8 Forward time: 4.94s Backward time: 3.44s Step time: 3.88s\n", - "416512 Examples seen. Accuracy:0.8982 Error: 0.30717 Loss:0.29425 Threads: 8 Forward time: 5.03s Backward time: 3.44s Step time: 3.84s\n", - "417152 Examples seen. Accuracy:0.8976 Error: 0.27737 Loss:0.28189 Threads: 8 Forward time: 5.17s Backward time: 3.50s Step time: 3.86s\n", - "417792 Examples seen. Accuracy:0.8999 Error: 0.37017 Loss:0.34436 Threads: 8 Forward time: 4.93s Backward time: 3.43s Step time: 3.88s\n", - "418432 Examples seen. Accuracy:0.8997 Error: 0.24142 Loss:0.22316 Threads: 8 Forward time: 4.97s Backward time: 3.46s Step time: 3.82s\n", - "419072 Examples seen. Accuracy:0.8996 Error: 0.29826 Loss:0.25407 Threads: 8 Forward time: 4.91s Backward time: 3.41s Step time: 3.74s\n", - "419712 Examples seen. Accuracy:0.8999 Error: 0.26656 Loss:0.25406 Threads: 8 Forward time: 4.91s Backward time: 3.41s Step time: 3.72s\n", - "420352 Examples seen. Accuracy:0.9016 Error: 0.30254 Loss:0.31285 Threads: 8 Forward time: 4.92s Backward time: 3.41s Step time: 3.70s\n", - "420992 Examples seen. Accuracy:0.9012 Error: 0.34374 Loss:0.37865 Threads: 8 Forward time: 4.96s Backward time: 3.41s Step time: 3.73s\n", - "421632 Examples seen. Accuracy:0.9007 Error: 0.29218 Loss:0.31859 Threads: 8 Forward time: 4.96s Backward time: 3.41s Step time: 3.72s\n", - "422272 Examples seen. 
Accuracy:0.9022 Error: 0.28574 Loss:0.28517 Threads: 8 Forward time: 5.01s Backward time: 3.41s Step time: 3.74s\n", - "422912 Examples seen. Accuracy:0.9028 Error: 0.33697 Loss:0.40628 Threads: 8 Forward time: 5.01s Backward time: 3.44s Step time: 3.77s\n", - "423552 Examples seen. Accuracy:0.9031 Error: 0.27130 Loss:0.24747 Threads: 8 Forward time: 4.95s Backward time: 3.45s Step time: 3.73s\n", - "424192 Examples seen. Accuracy:0.9050 Error: 0.22315 Loss:0.23372 Threads: 8 Forward time: 4.95s Backward time: 3.42s Step time: 3.71s\n", - "424832 Examples seen. Accuracy:0.9050 Error: 0.30035 Loss:0.25561 Threads: 8 Forward time: 4.95s Backward time: 3.44s Step time: 3.70s\n", - "425472 Examples seen. Accuracy:0.9050 Error: 0.28654 Loss:0.35782 Threads: 8 Forward time: 4.93s Backward time: 3.44s Step time: 3.70s\n", - "426112 Examples seen. Accuracy:0.9046 Error: 0.30567 Loss:0.26560 Threads: 8 Forward time: 4.92s Backward time: 3.43s Step time: 3.69s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "426752 Examples seen. Accuracy:0.9069 Error: 0.25852 Loss:0.25752 Threads: 8 Forward time: 5.00s Backward time: 3.45s Step time: 3.69s\n", - "427392 Examples seen. Accuracy:0.9069 Error: 0.11511 Loss:0.10765 Threads: 8 Forward time: 4.98s Backward time: 3.45s Step time: 3.75s\n", - "428032 Examples seen. Accuracy:0.9068 Error: 0.26568 Loss:0.31001 Threads: 8 Forward time: 5.02s Backward time: 3.47s Step time: 3.73s\n", - "428672 Examples seen. Accuracy:0.9069 Error: 0.26464 Loss:0.19554 Threads: 8 Forward time: 5.00s Backward time: 3.44s Step time: 3.74s\n", - "429312 Examples seen. Accuracy:0.9045 Error: 0.35197 Loss:0.42663 Threads: 8 Forward time: 4.99s Backward time: 3.46s Step time: 3.73s\n", - "429952 Examples seen. Accuracy:0.9037 Error: 0.40357 Loss:0.45106 Threads: 8 Forward time: 4.96s Backward time: 3.42s Step time: 3.73s\n", - "430592 Examples seen. 
Accuracy:0.9030 Error: 0.47551 Loss:0.59034 Threads: 8 Forward time: 5.04s Backward time: 3.42s Step time: 3.72s\n", - "431232 Examples seen. Accuracy:0.9040 Error: 0.21036 Loss:0.18538 Threads: 8 Forward time: 4.94s Backward time: 3.39s Step time: 4.25s\n", - "431872 Examples seen. Accuracy:0.9044 Error: 0.24429 Loss:0.35991 Threads: 8 Forward time: 4.91s Backward time: 3.44s Step time: 3.70s\n", - "432512 Examples seen. Accuracy:0.9046 Error: 0.19176 Loss:0.18209 Threads: 8 Forward time: 4.96s Backward time: 3.44s Step time: 3.74s\n", - "433152 Examples seen. Accuracy:0.9030 Error: 0.23679 Loss:0.22339 Threads: 8 Forward time: 5.03s Backward time: 3.44s Step time: 3.75s\n", - "433792 Examples seen. Accuracy:0.9009 Error: 0.22331 Loss:0.20803 Threads: 8 Forward time: 4.96s Backward time: 3.44s Step time: 3.76s\n", - "434432 Examples seen. Accuracy:0.8985 Error: 0.28498 Loss:0.25923 Threads: 8 Forward time: 4.94s Backward time: 3.43s Step time: 3.75s\n", - "435072 Examples seen. Accuracy:0.9009 Error: 0.17313 Loss:0.14283 Threads: 8 Forward time: 4.91s Backward time: 3.44s Step time: 3.75s\n", - "435712 Examples seen. Accuracy:0.9019 Error: 0.10770 Loss:0.07412 Threads: 8 Forward time: 4.98s Backward time: 3.45s Step time: 3.74s\n", - "436352 Examples seen. Accuracy:0.9031 Error: 0.17378 Loss:0.13147 Threads: 8 Forward time: 4.96s Backward time: 3.44s Step time: 3.79s\n", - "436992 Examples seen. Accuracy:0.9054 Error: 0.22728 Loss:0.22468 Threads: 8 Forward time: 4.96s Backward time: 3.45s Step time: 3.74s\n", - "437632 Examples seen. Accuracy:0.9045 Error: 0.26207 Loss:0.23495 Threads: 8 Forward time: 4.94s Backward time: 3.46s Step time: 3.73s\n", - "438272 Examples seen. Accuracy:0.9071 Error: 0.25478 Loss:0.27596 Threads: 8 Forward time: 4.93s Backward time: 3.45s Step time: 3.77s\n", - "438912 Examples seen. Accuracy:0.9066 Error: 0.31478 Loss:0.37640 Threads: 8 Forward time: 5.08s Backward time: 3.45s Step time: 3.75s\n", - "439552 Examples seen. 
Accuracy:0.9073 Error: 0.28200 Loss:0.28881 Threads: 8 Forward time: 4.92s Backward time: 3.43s Step time: 3.73s\n", - "440192 Examples seen. Accuracy:0.9063 Error: 0.28748 Loss:0.36401 Threads: 8 Forward time: 4.92s Backward time: 3.42s Step time: 3.71s\n", - "440832 Examples seen. Accuracy:0.9066 Error: 0.27006 Loss:0.27133 Threads: 8 Forward time: 4.98s Backward time: 3.44s Step time: 3.71s\n", - "441472 Examples seen. Accuracy:0.9075 Error: 0.23637 Loss:0.22124 Threads: 8 Forward time: 4.92s Backward time: 3.44s Step time: 3.73s\n", - "442112 Examples seen. Accuracy:0.9070 Error: 0.19347 Loss:0.18379 Threads: 8 Forward time: 4.93s Backward time: 3.41s Step time: 3.72s\n", - "442752 Examples seen. Accuracy:0.9046 Error: 0.29052 Loss:0.27703 Threads: 8 Forward time: 4.95s Backward time: 3.40s Step time: 3.72s\n", - "443392 Examples seen. Accuracy:0.9040 Error: 0.37173 Loss:0.53982 Threads: 8 Forward time: 4.93s Backward time: 3.43s Step time: 3.73s\n", - "444032 Examples seen. Accuracy:0.9023 Error: 0.43279 Loss:0.45847 Threads: 8 Forward time: 4.96s Backward time: 3.46s Step time: 3.73s\n", - "444672 Examples seen. Accuracy:0.9014 Error: 0.11264 Loss:0.10107 Threads: 8 Forward time: 4.93s Backward time: 3.38s Step time: 3.73s\n", - "445312 Examples seen. Accuracy:0.8993 Error: 0.36809 Loss:0.40034 Threads: 8 Forward time: 5.02s Backward time: 3.45s Step time: 3.78s\n", - "445952 Examples seen. Accuracy:0.9005 Error: 0.29819 Loss:0.40921 Threads: 8 Forward time: 4.98s Backward time: 3.39s Step time: 3.76s\n", - "446592 Examples seen. Accuracy:0.9026 Error: 0.32405 Loss:0.44635 Threads: 8 Forward time: 4.92s Backward time: 3.44s Step time: 3.75s\n", - "447232 Examples seen. Accuracy:0.9034 Error: 0.25668 Loss:0.27812 Threads: 8 Forward time: 4.93s Backward time: 3.39s Step time: 3.72s\n", - "447872 Examples seen. Accuracy:0.9027 Error: 0.32747 Loss:0.31855 Threads: 8 Forward time: 4.96s Backward time: 3.40s Step time: 3.72s\n", - "448512 Examples seen. 
Accuracy:0.9027 Error: 0.22717 Loss:0.28368 Threads: 8 Forward time: 4.96s Backward time: 3.43s Step time: 3.73s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 9 Examples seen:449136 Validation Accuracy: 0.9261 Validation Error: 0.2449 Validation Loss: 0.2281 Total time: 51.18min\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.1 hours.\n", - "Epochs: 9. Working time: 0.85 hours.\n", - "449776 Examples seen. Accuracy:0.8999 Error: 0.20598 Loss:0.19802 Threads: 8 Forward time: 4.95s Backward time: 3.42s Step time: 3.74s\n", - "450416 Examples seen. Accuracy:0.9011 Error: 0.29955 Loss:0.24860 Threads: 8 Forward time: 4.97s Backward time: 3.40s Step time: 3.72s\n", - "451056 Examples seen. Accuracy:0.9013 Error: 0.27500 Loss:0.32053 Threads: 8 Forward time: 4.98s Backward time: 3.38s Step time: 3.72s\n", - "451696 Examples seen. Accuracy:0.9010 Error: 0.31762 Loss:0.35769 Threads: 8 Forward time: 4.98s Backward time: 3.42s Step time: 3.77s\n", - "452336 Examples seen. Accuracy:0.9012 Error: 0.20646 Loss:0.19609 Threads: 8 Forward time: 4.98s Backward time: 3.38s Step time: 3.72s\n", - "452976 Examples seen. Accuracy:0.8994 Error: 0.30841 Loss:0.33099 Threads: 8 Forward time: 4.96s Backward time: 3.40s Step time: 3.73s\n", - "453616 Examples seen. Accuracy:0.8981 Error: 0.48714 Loss:0.50947 Threads: 8 Forward time: 5.02s Backward time: 3.40s Step time: 3.73s\n", - "454256 Examples seen. Accuracy:0.8983 Error: 0.27420 Loss:0.25333 Threads: 8 Forward time: 5.03s Backward time: 3.44s Step time: 3.75s\n", - "454896 Examples seen. Accuracy:0.9002 Error: 0.29410 Loss:0.31086 Threads: 8 Forward time: 5.23s Backward time: 3.62s Step time: 3.76s\n", - "455536 Examples seen. Accuracy:0.9000 Error: 0.36615 Loss:0.55404 Threads: 8 Forward time: 4.99s Backward time: 3.43s Step time: 4.30s\n", - "456176 Examples seen. 
Accuracy:0.8995 Error: 0.31797 Loss:0.34255 Threads: 8 Forward time: 4.95s Backward time: 3.38s Step time: 3.71s\n", - "456816 Examples seen. Accuracy:0.9013 Error: 0.13621 Loss:0.09656 Threads: 8 Forward time: 5.00s Backward time: 3.42s Step time: 3.71s\n", - "457456 Examples seen. Accuracy:0.8996 Error: 0.23699 Loss:0.28439 Threads: 8 Forward time: 4.96s Backward time: 3.40s Step time: 3.76s\n", - "458096 Examples seen. Accuracy:0.8997 Error: 0.26329 Loss:0.36544 Threads: 8 Forward time: 4.94s Backward time: 3.41s Step time: 3.79s\n", - "458736 Examples seen. Accuracy:0.9002 Error: 0.31634 Loss:0.39390 Threads: 8 Forward time: 4.92s Backward time: 3.40s Step time: 3.74s\n", - "459376 Examples seen. Accuracy:0.9019 Error: 0.20197 Loss:0.20080 Threads: 8 Forward time: 4.93s Backward time: 3.40s Step time: 3.72s\n", - "460016 Examples seen. Accuracy:0.9020 Error: 0.23047 Loss:0.17820 Threads: 8 Forward time: 5.00s Backward time: 3.39s Step time: 3.73s\n", - "460656 Examples seen. Accuracy:0.9019 Error: 0.19411 Loss:0.17867 Threads: 8 Forward time: 4.93s Backward time: 3.38s Step time: 3.73s\n", - "461296 Examples seen. Accuracy:0.9027 Error: 0.22714 Loss:0.19193 Threads: 8 Forward time: 4.92s Backward time: 3.43s Step time: 3.72s\n", - "461936 Examples seen. Accuracy:0.9042 Error: 0.19459 Loss:0.16151 Threads: 8 Forward time: 4.92s Backward time: 3.38s Step time: 3.75s\n", - "462576 Examples seen. Accuracy:0.9015 Error: 0.22002 Loss:0.18069 Threads: 8 Forward time: 4.93s Backward time: 3.41s Step time: 3.72s\n", - "463216 Examples seen. Accuracy:0.9030 Error: 0.18416 Loss:0.15353 Threads: 8 Forward time: 4.98s Backward time: 3.41s Step time: 3.72s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "463856 Examples seen. Accuracy:0.9045 Error: 0.11846 Loss:0.09183 Threads: 8 Forward time: 4.92s Backward time: 3.42s Step time: 3.72s\n", - "464496 Examples seen. 
Accuracy:0.9064 Error: 0.21130 Loss:0.20232 Threads: 8 Forward time: 4.94s Backward time: 3.41s Step time: 3.72s\n", - "465136 Examples seen. Accuracy:0.9036 Error: 0.34616 Loss:0.57110 Threads: 8 Forward time: 4.90s Backward time: 3.42s Step time: 3.70s\n", - "465776 Examples seen. Accuracy:0.9024 Error: 0.25141 Loss:0.26351 Threads: 8 Forward time: 4.91s Backward time: 3.43s Step time: 3.71s\n", - "466416 Examples seen. Accuracy:0.9017 Error: 0.29043 Loss:0.35970 Threads: 8 Forward time: 4.93s Backward time: 3.45s Step time: 3.69s\n", - "467056 Examples seen. Accuracy:0.9018 Error: 0.32809 Loss:0.44279 Threads: 8 Forward time: 4.95s Backward time: 3.39s Step time: 3.76s\n", - "467696 Examples seen. Accuracy:0.9012 Error: 0.32153 Loss:0.33375 Threads: 8 Forward time: 5.00s Backward time: 3.44s Step time: 3.71s\n", - "468336 Examples seen. Accuracy:0.9023 Error: 0.26314 Loss:0.24968 Threads: 8 Forward time: 4.97s Backward time: 3.44s Step time: 3.71s\n", - "468976 Examples seen. Accuracy:0.9046 Error: 0.21019 Loss:0.18661 Threads: 8 Forward time: 4.93s Backward time: 3.42s Step time: 3.70s\n", - "469616 Examples seen. Accuracy:0.9062 Error: 0.32064 Loss:0.37426 Threads: 8 Forward time: 4.94s Backward time: 3.42s Step time: 3.80s\n", - "470256 Examples seen. Accuracy:0.9068 Error: 0.34623 Loss:0.39703 Threads: 8 Forward time: 5.00s Backward time: 3.43s Step time: 3.70s\n", - "470896 Examples seen. Accuracy:0.9040 Error: 0.27478 Loss:0.26097 Threads: 8 Forward time: 4.92s Backward time: 3.40s Step time: 3.69s\n", - "471536 Examples seen. Accuracy:0.9036 Error: 0.21156 Loss:0.17101 Threads: 8 Forward time: 4.92s Backward time: 3.46s Step time: 3.69s\n", - "472176 Examples seen. Accuracy:0.9031 Error: 0.32779 Loss:0.34318 Threads: 8 Forward time: 4.96s Backward time: 3.41s Step time: 3.70s\n", - "472816 Examples seen. Accuracy:0.9039 Error: 0.18085 Loss:0.18187 Threads: 8 Forward time: 4.97s Backward time: 3.40s Step time: 3.70s\n", - "473456 Examples seen. 
Accuracy:0.9047 Error: 0.25392 Loss:0.22012 Threads: 8 Forward time: 4.95s Backward time: 3.39s Step time: 3.71s\n", - "474096 Examples seen. Accuracy:0.9061 Error: 0.19583 Loss:0.39767 Threads: 8 Forward time: 4.93s Backward time: 3.43s Step time: 3.70s\n", - "474736 Examples seen. Accuracy:0.9050 Error: 0.27020 Loss:0.24882 Threads: 8 Forward time: 4.95s Backward time: 3.44s Step time: 3.70s\n", - "475376 Examples seen. Accuracy:0.9039 Error: 0.25722 Loss:0.34675 Threads: 8 Forward time: 4.98s Backward time: 3.42s Step time: 3.71s\n", - "476016 Examples seen. Accuracy:0.9034 Error: 0.25629 Loss:0.20712 Threads: 8 Forward time: 4.99s Backward time: 3.43s Step time: 3.72s\n", - "476656 Examples seen. Accuracy:0.9038 Error: 0.27931 Loss:0.25116 Threads: 8 Forward time: 4.98s Backward time: 3.41s Step time: 3.74s\n", - "477296 Examples seen. Accuracy:0.9039 Error: 0.24468 Loss:0.28487 Threads: 8 Forward time: 4.94s Backward time: 3.40s Step time: 3.72s\n", - "477936 Examples seen. Accuracy:0.9054 Error: 0.20802 Loss:0.24282 Threads: 8 Forward time: 4.96s Backward time: 3.42s Step time: 3.70s\n", - "478576 Examples seen. Accuracy:0.9067 Error: 0.26210 Loss:0.26591 Threads: 8 Forward time: 4.98s Backward time: 3.42s Step time: 3.72s\n", - "479216 Examples seen. Accuracy:0.9079 Error: 0.20256 Loss:0.17632 Threads: 8 Forward time: 4.90s Backward time: 3.38s Step time: 3.70s\n", - "479856 Examples seen. Accuracy:0.9085 Error: 0.24697 Loss:0.20387 Threads: 8 Forward time: 4.92s Backward time: 3.42s Step time: 3.70s\n", - "480496 Examples seen. Accuracy:0.9078 Error: 0.28552 Loss:0.32312 Threads: 8 Forward time: 5.03s Backward time: 3.45s Step time: 3.74s\n", - "481136 Examples seen. Accuracy:0.9082 Error: 0.24127 Loss:0.29530 Threads: 8 Forward time: 4.98s Backward time: 3.43s Step time: 3.79s\n", - "481776 Examples seen. Accuracy:0.9097 Error: 0.22376 Loss:0.20318 Threads: 8 Forward time: 5.03s Backward time: 3.42s Step time: 3.73s\n", - "482416 Examples seen. 
Accuracy:0.9084 Error: 0.30161 Loss:0.32139 Threads: 8 Forward time: 5.02s Backward time: 3.42s Step time: 3.75s\n", - "483056 Examples seen. Accuracy:0.9080 Error: 0.25141 Loss:0.22090 Threads: 8 Forward time: 5.02s Backward time: 3.43s Step time: 3.74s\n", - "483696 Examples seen. Accuracy:0.9081 Error: 0.32059 Loss:0.43725 Threads: 8 Forward time: 5.01s Backward time: 3.44s Step time: 3.74s\n", - "484336 Examples seen. Accuracy:0.9090 Error: 0.15752 Loss:0.14132 Threads: 8 Forward time: 5.01s Backward time: 3.43s Step time: 4.28s\n", - "484976 Examples seen. Accuracy:0.9097 Error: 0.29585 Loss:0.39107 Threads: 8 Forward time: 4.95s Backward time: 3.42s Step time: 3.76s\n", - "485616 Examples seen. Accuracy:0.9101 Error: 0.22055 Loss:0.17601 Threads: 8 Forward time: 4.93s Backward time: 3.39s Step time: 3.72s\n", - "486256 Examples seen. Accuracy:0.9107 Error: 0.17638 Loss:0.11782 Threads: 8 Forward time: 4.93s Backward time: 3.39s Step time: 3.70s\n", - "486896 Examples seen. Accuracy:0.9099 Error: 0.40099 Loss:0.41963 Threads: 8 Forward time: 5.01s Backward time: 3.41s Step time: 3.76s\n", - "487536 Examples seen. Accuracy:0.9071 Error: 0.38620 Loss:0.47616 Threads: 8 Forward time: 5.00s Backward time: 3.39s Step time: 3.75s\n", - "488176 Examples seen. Accuracy:0.9080 Error: 0.26703 Loss:0.22957 Threads: 8 Forward time: 4.97s Backward time: 3.41s Step time: 3.76s\n", - "488816 Examples seen. Accuracy:0.9097 Error: 0.21145 Loss:0.31018 Threads: 8 Forward time: 4.93s Backward time: 3.41s Step time: 3.67s\n", - "489456 Examples seen. Accuracy:0.9100 Error: 0.28310 Loss:0.27703 Threads: 8 Forward time: 5.00s Backward time: 3.44s Step time: 3.78s\n", - "490096 Examples seen. Accuracy:0.9078 Error: 0.21226 Loss:0.27938 Threads: 8 Forward time: 4.96s Backward time: 3.41s Step time: 3.70s\n", - "490736 Examples seen. Accuracy:0.9087 Error: 0.20895 Loss:0.19127 Threads: 8 Forward time: 4.97s Backward time: 3.43s Step time: 3.69s\n", - "491376 Examples seen. 
Accuracy:0.9085 Error: 0.17187 Loss:0.16533 Threads: 8 Forward time: 4.97s Backward time: 3.39s Step time: 3.70s\n", - "492016 Examples seen. Accuracy:0.9087 Error: 0.32022 Loss:0.29375 Threads: 8 Forward time: 5.02s Backward time: 3.45s Step time: 3.71s\n", - "492656 Examples seen. Accuracy:0.9078 Error: 0.26783 Loss:0.30460 Threads: 8 Forward time: 5.00s Backward time: 3.43s Step time: 3.79s\n", - "493296 Examples seen. Accuracy:0.9065 Error: 0.26092 Loss:0.30560 Threads: 8 Forward time: 4.92s Backward time: 3.40s Step time: 3.70s\n", - "493936 Examples seen. Accuracy:0.9058 Error: 0.23102 Loss:0.22465 Threads: 8 Forward time: 4.91s Backward time: 3.39s Step time: 3.67s\n", - "494576 Examples seen. Accuracy:0.9058 Error: 0.37346 Loss:0.40639 Threads: 8 Forward time: 4.94s Backward time: 3.39s Step time: 3.68s\n", - "495216 Examples seen. Accuracy:0.9076 Error: 0.17668 Loss:0.18873 Threads: 8 Forward time: 4.90s Backward time: 3.37s Step time: 3.66s\n", - "495856 Examples seen. Accuracy:0.9081 Error: 0.26991 Loss:0.23883 Threads: 8 Forward time: 4.93s Backward time: 3.38s Step time: 3.65s\n", - "496496 Examples seen. Accuracy:0.9090 Error: 0.18075 Loss:0.15428 Threads: 8 Forward time: 4.97s Backward time: 3.38s Step time: 3.66s\n", - "497136 Examples seen. Accuracy:0.9078 Error: 0.27681 Loss:0.28423 Threads: 8 Forward time: 4.92s Backward time: 3.40s Step time: 3.66s\n", - "497776 Examples seen. Accuracy:0.9080 Error: 0.21698 Loss:0.25634 Threads: 8 Forward time: 4.91s Backward time: 3.37s Step time: 3.66s\n", - "498416 Examples seen. Accuracy:0.9084 Error: 0.20058 Loss:0.13238 Threads: 8 Forward time: 4.92s Backward time: 3.36s Step time: 3.65s\n", - "Starting Validation.\n", - "VALIDATION RECORD! 
Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 10 Examples seen:499040 Validation Accuracy: 0.9384 Validation Error: 0.1939 Validation Loss: 0.1803 Total time: 56.51min\n", - "Starting Testing.\n", - "Epochs: 10 Examples seen:499040 Test Accuracy: 0.9467 Test Error: 0.1922 Test Loss: 0.1646 Total time: 56.98min\n", - "Epoch time: 4.8 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 10. Working time: 0.95 hours.\n", - "Learning rate set to:0.00090\n", - "499680 Examples seen. Accuracy:0.9075 Error: 0.25707 Loss:0.21631 Threads: 8 Forward time: 4.92s Backward time: 3.36s Step time: 3.75s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "500320 Examples seen. Accuracy:0.9101 Error: 0.20803 Loss:0.16103 Threads: 8 Forward time: 5.03s Backward time: 3.39s Step time: 3.72s\n", - "500960 Examples seen. Accuracy:0.9105 Error: 0.16927 Loss:0.26840 Threads: 8 Forward time: 5.06s Backward time: 3.41s Step time: 3.74s\n", - "501600 Examples seen. Accuracy:0.9110 Error: 0.21942 Loss:0.24421 Threads: 8 Forward time: 5.02s Backward time: 3.38s Step time: 3.73s\n", - "502240 Examples seen. Accuracy:0.9120 Error: 0.21498 Loss:0.18714 Threads: 8 Forward time: 5.03s Backward time: 3.42s Step time: 3.75s\n", - "502880 Examples seen. Accuracy:0.9127 Error: 0.13512 Loss:0.09639 Threads: 8 Forward time: 5.00s Backward time: 3.45s Step time: 3.74s\n", - "503520 Examples seen. Accuracy:0.9126 Error: 0.28967 Loss:0.28949 Threads: 8 Forward time: 5.09s Backward time: 3.46s Step time: 3.80s\n", - "504160 Examples seen. Accuracy:0.9131 Error: 0.21126 Loss:0.20389 Threads: 8 Forward time: 6.99s Backward time: 5.28s Step time: 4.08s\n", - "504800 Examples seen. Accuracy:0.9142 Error: 0.24593 Loss:0.30065 Threads: 8 Forward time: 4.92s Backward time: 3.42s Step time: 3.97s\n", - "505440 Examples seen. Accuracy:0.9130 Error: 0.34897 Loss:0.36845 Threads: 8 Forward time: 4.97s Backward time: 3.41s Step time: 3.71s\n", - "506080 Examples seen. 
Accuracy:0.9130 Error: 0.32447 Loss:0.40064 Threads: 8 Forward time: 4.91s Backward time: 3.38s Step time: 3.71s\n", - "506720 Examples seen. Accuracy:0.9142 Error: 0.21962 Loss:0.22803 Threads: 8 Forward time: 4.97s Backward time: 3.40s Step time: 3.71s\n", - "507360 Examples seen. Accuracy:0.9139 Error: 0.27843 Loss:0.27879 Threads: 8 Forward time: 4.92s Backward time: 3.40s Step time: 3.69s\n", - "508000 Examples seen. Accuracy:0.9144 Error: 0.18976 Loss:0.17273 Threads: 8 Forward time: 4.99s Backward time: 3.42s Step time: 3.68s\n", - "508640 Examples seen. Accuracy:0.9145 Error: 0.18255 Loss:0.12871 Threads: 8 Forward time: 5.00s Backward time: 3.42s Step time: 3.68s\n", - "509280 Examples seen. Accuracy:0.9154 Error: 0.18851 Loss:0.22011 Threads: 8 Forward time: 4.96s Backward time: 3.42s Step time: 3.69s\n", - "509920 Examples seen. Accuracy:0.9142 Error: 0.37877 Loss:0.36610 Threads: 8 Forward time: 4.97s Backward time: 3.38s Step time: 3.69s\n", - "510560 Examples seen. Accuracy:0.9147 Error: 0.18671 Loss:0.17910 Threads: 8 Forward time: 4.93s Backward time: 3.42s Step time: 3.66s\n", - "511200 Examples seen. Accuracy:0.9148 Error: 0.13493 Loss:0.08595 Threads: 8 Forward time: 4.91s Backward time: 3.42s Step time: 3.66s\n", - "511840 Examples seen. Accuracy:0.9153 Error: 0.25926 Loss:0.30186 Threads: 8 Forward time: 4.97s Backward time: 3.41s Step time: 3.69s\n", - "512480 Examples seen. Accuracy:0.9162 Error: 0.05866 Loss:0.06469 Threads: 8 Forward time: 4.92s Backward time: 3.40s Step time: 3.68s\n", - "513120 Examples seen. Accuracy:0.9164 Error: 0.24818 Loss:0.27046 Threads: 8 Forward time: 4.91s Backward time: 3.36s Step time: 3.67s\n", - "513760 Examples seen. Accuracy:0.9156 Error: 0.20894 Loss:0.16550 Threads: 8 Forward time: 5.03s Backward time: 3.44s Step time: 3.72s\n", - "514400 Examples seen. Accuracy:0.9169 Error: 0.22496 Loss:0.25856 Threads: 8 Forward time: 4.98s Backward time: 3.41s Step time: 3.71s\n", - "515040 Examples seen. 
Accuracy:0.9152 Error: 0.25079 Loss:0.23001 Threads: 8 Forward time: 4.97s Backward time: 3.37s Step time: 3.71s\n", - "515680 Examples seen. Accuracy:0.9167 Error: 0.22300 Loss:0.16596 Threads: 8 Forward time: 4.94s Backward time: 3.40s Step time: 3.70s\n", - "516320 Examples seen. Accuracy:0.9162 Error: 0.35507 Loss:0.38129 Threads: 8 Forward time: 4.98s Backward time: 3.42s Step time: 3.73s\n", - "516960 Examples seen. Accuracy:0.9161 Error: 0.25773 Loss:0.30329 Threads: 8 Forward time: 4.96s Backward time: 3.39s Step time: 3.72s\n", - "517600 Examples seen. Accuracy:0.9169 Error: 0.23529 Loss:0.25212 Threads: 8 Forward time: 4.93s Backward time: 3.39s Step time: 3.72s\n", - "518240 Examples seen. Accuracy:0.9149 Error: 0.30573 Loss:0.39360 Threads: 8 Forward time: 4.99s Backward time: 3.44s Step time: 3.72s\n", - "518880 Examples seen. Accuracy:0.9156 Error: 0.20444 Loss:0.21559 Threads: 8 Forward time: 4.94s Backward time: 3.38s Step time: 3.73s\n", - "519520 Examples seen. Accuracy:0.9157 Error: 0.13176 Loss:0.12838 Threads: 8 Forward time: 4.99s Backward time: 3.41s Step time: 3.73s\n", - "520160 Examples seen. Accuracy:0.9163 Error: 0.27645 Loss:0.38159 Threads: 8 Forward time: 5.02s Backward time: 3.40s Step time: 3.71s\n", - "520800 Examples seen. Accuracy:0.9157 Error: 0.20431 Loss:0.23637 Threads: 8 Forward time: 4.99s Backward time: 3.40s Step time: 3.69s\n", - "521440 Examples seen. Accuracy:0.9172 Error: 0.33938 Loss:0.48311 Threads: 8 Forward time: 4.92s Backward time: 3.38s Step time: 3.69s\n", - "522080 Examples seen. Accuracy:0.9188 Error: 0.16453 Loss:0.12930 Threads: 8 Forward time: 4.91s Backward time: 3.39s Step time: 3.68s\n", - "522720 Examples seen. Accuracy:0.9196 Error: 0.11996 Loss:0.09880 Threads: 8 Forward time: 4.92s Backward time: 3.39s Step time: 3.67s\n", - "523360 Examples seen. Accuracy:0.9166 Error: 0.27744 Loss:0.37526 Threads: 8 Forward time: 4.98s Backward time: 3.38s Step time: 3.67s\n", - "524000 Examples seen. 
Accuracy:0.9155 Error: 0.32177 Loss:0.33154 Threads: 8 Forward time: 4.91s Backward time: 3.36s Step time: 3.66s\n", - "524640 Examples seen. Accuracy:0.9168 Error: 0.14842 Loss:0.11237 Threads: 8 Forward time: 5.07s Backward time: 3.45s Step time: 3.82s\n", - "525280 Examples seen. Accuracy:0.9172 Error: 0.16090 Loss:0.15605 Threads: 8 Forward time: 5.00s Backward time: 3.41s Step time: 3.75s\n", - "525920 Examples seen. Accuracy:0.9168 Error: 0.17786 Loss:0.12825 Threads: 8 Forward time: 5.05s Backward time: 3.42s Step time: 3.78s\n", - "526560 Examples seen. Accuracy:0.9158 Error: 0.24808 Loss:0.33885 Threads: 8 Forward time: 5.00s Backward time: 3.37s Step time: 3.78s\n", - "527200 Examples seen. Accuracy:0.9155 Error: 0.19414 Loss:0.15540 Threads: 8 Forward time: 4.96s Backward time: 3.40s Step time: 3.75s\n", - "527840 Examples seen. Accuracy:0.9150 Error: 0.20107 Loss:0.16529 Threads: 8 Forward time: 5.01s Backward time: 3.39s Step time: 3.76s\n", - "528480 Examples seen. Accuracy:0.9158 Error: 0.21912 Loss:0.17129 Threads: 8 Forward time: 4.97s Backward time: 3.41s Step time: 3.76s\n", - "529120 Examples seen. Accuracy:0.9155 Error: 0.30580 Loss:0.35358 Threads: 8 Forward time: 4.99s Backward time: 3.40s Step time: 3.74s\n", - "529760 Examples seen. Accuracy:0.9171 Error: 0.20621 Loss:0.17324 Threads: 8 Forward time: 4.95s Backward time: 3.39s Step time: 3.75s\n", - "530400 Examples seen. Accuracy:0.9169 Error: 0.39283 Loss:0.45150 Threads: 8 Forward time: 4.98s Backward time: 3.44s Step time: 3.76s\n", - "531040 Examples seen. Accuracy:0.9176 Error: 0.23696 Loss:0.26160 Threads: 8 Forward time: 5.05s Backward time: 3.44s Step time: 3.79s\n", - "531680 Examples seen. Accuracy:0.9176 Error: 0.21844 Loss:0.25647 Threads: 8 Forward time: 4.98s Backward time: 3.43s Step time: 3.77s\n", - "532320 Examples seen. Accuracy:0.9177 Error: 0.21973 Loss:0.28256 Threads: 8 Forward time: 5.01s Backward time: 3.40s Step time: 3.77s\n", - "532960 Examples seen. 
Accuracy:0.9183 Error: 0.24940 Loss:0.24267 Threads: 8 Forward time: 5.00s Backward time: 3.36s Step time: 3.77s\n", - "533600 Examples seen. Accuracy:0.9178 Error: 0.33766 Loss:0.38351 Threads: 8 Forward time: 5.09s Backward time: 3.40s Step time: 3.80s\n", - "534240 Examples seen. Accuracy:0.9176 Error: 0.26446 Loss:0.25359 Threads: 8 Forward time: 4.93s Backward time: 3.43s Step time: 4.32s\n", - "534880 Examples seen. Accuracy:0.9192 Error: 0.23802 Loss:0.27079 Threads: 8 Forward time: 4.96s Backward time: 3.38s Step time: 3.69s\n", - "535520 Examples seen. Accuracy:0.9190 Error: 0.28623 Loss:0.34778 Threads: 8 Forward time: 4.97s Backward time: 3.40s Step time: 3.68s\n", - "536160 Examples seen. Accuracy:0.9180 Error: 0.28343 Loss:0.30509 Threads: 8 Forward time: 4.90s Backward time: 3.39s Step time: 3.68s\n", - "536800 Examples seen. Accuracy:0.9182 Error: 0.27138 Loss:0.35933 Threads: 8 Forward time: 4.91s Backward time: 3.35s Step time: 3.65s\n", - "537440 Examples seen. Accuracy:0.9176 Error: 0.16258 Loss:0.15319 Threads: 8 Forward time: 4.90s Backward time: 3.36s Step time: 3.66s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "538080 Examples seen. Accuracy:0.9185 Error: 0.14722 Loss:0.13124 Threads: 8 Forward time: 4.91s Backward time: 3.36s Step time: 3.65s\n", - "538720 Examples seen. Accuracy:0.9195 Error: 0.35435 Loss:0.51405 Threads: 8 Forward time: 4.90s Backward time: 3.39s Step time: 3.65s\n", - "539360 Examples seen. Accuracy:0.9189 Error: 0.18575 Loss:0.15035 Threads: 8 Forward time: 4.97s Backward time: 3.39s Step time: 3.64s\n", - "540000 Examples seen. Accuracy:0.9187 Error: 0.27777 Loss:0.30759 Threads: 8 Forward time: 4.89s Backward time: 3.39s Step time: 3.66s\n", - "540640 Examples seen. Accuracy:0.9191 Error: 0.17585 Loss:0.15341 Threads: 8 Forward time: 4.92s Backward time: 3.38s Step time: 3.68s\n", - "541280 Examples seen. 
Accuracy:0.9190 Error: 0.23668 Loss:0.28170 Threads: 8 Forward time: 4.95s Backward time: 3.40s Step time: 3.69s\n", - "541920 Examples seen. Accuracy:0.9200 Error: 0.21885 Loss:0.32181 Threads: 8 Forward time: 4.90s Backward time: 3.41s Step time: 3.69s\n", - "542560 Examples seen. Accuracy:0.9198 Error: 0.21759 Loss:0.29533 Threads: 8 Forward time: 4.96s Backward time: 3.37s Step time: 3.70s\n", - "543200 Examples seen. Accuracy:0.9205 Error: 0.18567 Loss:0.17029 Threads: 8 Forward time: 5.00s Backward time: 3.42s Step time: 3.74s\n", - "543840 Examples seen. Accuracy:0.9186 Error: 0.30721 Loss:0.38167 Threads: 8 Forward time: 4.92s Backward time: 3.42s Step time: 3.76s\n", - "544480 Examples seen. Accuracy:0.9190 Error: 0.24838 Loss:0.23080 Threads: 8 Forward time: 4.98s Backward time: 3.37s Step time: 3.65s\n", - "545120 Examples seen. Accuracy:0.9193 Error: 0.14867 Loss:0.10943 Threads: 8 Forward time: 4.91s Backward time: 3.37s Step time: 3.67s\n", - "545760 Examples seen. Accuracy:0.9205 Error: 0.19442 Loss:0.33386 Threads: 8 Forward time: 4.94s Backward time: 3.39s Step time: 3.66s\n", - "546400 Examples seen. Accuracy:0.9197 Error: 0.21721 Loss:0.21738 Threads: 8 Forward time: 4.93s Backward time: 3.37s Step time: 3.65s\n", - "547040 Examples seen. Accuracy:0.9198 Error: 0.19133 Loss:0.15499 Threads: 8 Forward time: 4.94s Backward time: 3.40s Step time: 3.66s\n", - "547680 Examples seen. Accuracy:0.9186 Error: 0.34019 Loss:0.42148 Threads: 8 Forward time: 4.94s Backward time: 3.40s Step time: 3.75s\n", - "548320 Examples seen. Accuracy:0.9152 Error: 0.22790 Loss:0.23251 Threads: 8 Forward time: 4.91s Backward time: 3.39s Step time: 3.73s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 11 Examples seen:548944 Validation Accuracy: 0.9518 Validation Error: 0.1524 Validation Loss: 0.1459 Total time: 62.29min\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.1 hours.\n", - "Epochs: 11. 
Working time: 1.04 hours.\n", - "549584 Examples seen. Accuracy:0.9144 Error: 0.23092 Loss:0.36938 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.72s\n", - "550224 Examples seen. Accuracy:0.9128 Error: 0.26194 Loss:0.20998 Threads: 8 Forward time: 5.03s Backward time: 3.43s Step time: 3.70s\n", - "550864 Examples seen. Accuracy:0.9119 Error: 0.25531 Loss:0.36814 Threads: 8 Forward time: 5.03s Backward time: 3.40s Step time: 3.74s\n", - "551504 Examples seen. Accuracy:0.9126 Error: 0.18326 Loss:0.19310 Threads: 8 Forward time: 5.01s Backward time: 3.41s Step time: 3.73s\n", - "552144 Examples seen. Accuracy:0.9117 Error: 0.32824 Loss:0.36186 Threads: 8 Forward time: 5.11s Backward time: 3.45s Step time: 3.76s\n", - "552784 Examples seen. Accuracy:0.9117 Error: 0.25053 Loss:0.19268 Threads: 8 Forward time: 5.06s Backward time: 3.52s Step time: 3.78s\n", - "553424 Examples seen. Accuracy:0.9116 Error: 0.15198 Loss:0.15024 Threads: 8 Forward time: 5.01s Backward time: 3.39s Step time: 3.75s\n", - "554064 Examples seen. Accuracy:0.9123 Error: 0.23473 Loss:0.24098 Threads: 8 Forward time: 4.94s Backward time: 3.38s Step time: 3.73s\n", - "554704 Examples seen. Accuracy:0.9116 Error: 0.23039 Loss:0.32462 Threads: 8 Forward time: 4.95s Backward time: 3.38s Step time: 3.71s\n", - "555344 Examples seen. Accuracy:0.9129 Error: 0.20236 Loss:0.16263 Threads: 8 Forward time: 5.00s Backward time: 3.39s Step time: 3.72s\n", - "555984 Examples seen. Accuracy:0.9142 Error: 0.20465 Loss:0.21534 Threads: 8 Forward time: 4.99s Backward time: 3.39s Step time: 3.73s\n", - "556624 Examples seen. Accuracy:0.9136 Error: 0.26839 Loss:0.25619 Threads: 8 Forward time: 5.05s Backward time: 3.39s Step time: 3.73s\n", - "557264 Examples seen. Accuracy:0.9130 Error: 0.35622 Loss:0.40614 Threads: 8 Forward time: 5.03s Backward time: 3.38s Step time: 3.76s\n", - "557904 Examples seen. 
Accuracy:0.9126 Error: 0.23685 Loss:0.22980 Threads: 8 Forward time: 4.99s Backward time: 3.36s Step time: 3.72s\n", - "558544 Examples seen. Accuracy:0.9111 Error: 0.16157 Loss:0.11267 Threads: 8 Forward time: 5.05s Backward time: 3.38s Step time: 3.71s\n", - "559184 Examples seen. Accuracy:0.9116 Error: 0.28721 Loss:0.23702 Threads: 8 Forward time: 5.00s Backward time: 3.40s Step time: 4.46s\n", - "559824 Examples seen. Accuracy:0.9134 Error: 0.15638 Loss:0.11443 Threads: 8 Forward time: 4.97s Backward time: 3.38s Step time: 3.99s\n", - "560464 Examples seen. Accuracy:0.9139 Error: 0.20917 Loss:0.35016 Threads: 8 Forward time: 4.93s Backward time: 3.35s Step time: 3.86s\n", - "561104 Examples seen. Accuracy:0.9149 Error: 0.20351 Loss:0.14927 Threads: 8 Forward time: 5.00s Backward time: 3.40s Step time: 3.71s\n", - "561744 Examples seen. Accuracy:0.9146 Error: 0.31762 Loss:0.28682 Threads: 8 Forward time: 4.96s Backward time: 3.35s Step time: 3.65s\n", - "562384 Examples seen. Accuracy:0.9139 Error: 0.27747 Loss:0.36727 Threads: 8 Forward time: 4.90s Backward time: 3.36s Step time: 3.65s\n", - "563024 Examples seen. Accuracy:0.9138 Error: 0.21675 Loss:0.27570 Threads: 8 Forward time: 4.96s Backward time: 3.37s Step time: 3.67s\n", - "563664 Examples seen. Accuracy:0.9146 Error: 0.20100 Loss:0.19049 Threads: 8 Forward time: 4.92s Backward time: 3.35s Step time: 3.66s\n", - "564304 Examples seen. Accuracy:0.9150 Error: 0.22765 Loss:0.24695 Threads: 8 Forward time: 5.01s Backward time: 3.36s Step time: 3.67s\n", - "564944 Examples seen. Accuracy:0.9179 Error: 0.27237 Loss:0.26024 Threads: 8 Forward time: 4.93s Backward time: 3.37s Step time: 3.67s\n", - "565584 Examples seen. Accuracy:0.9172 Error: 0.25457 Loss:0.29427 Threads: 8 Forward time: 4.96s Backward time: 3.37s Step time: 3.66s\n", - "566224 Examples seen. Accuracy:0.9172 Error: 0.20643 Loss:0.17602 Threads: 8 Forward time: 4.91s Backward time: 3.36s Step time: 3.64s\n", - "566864 Examples seen. 
Accuracy:0.9160 Error: 0.15186 Loss:0.11778 Threads: 8 Forward time: 4.96s Backward time: 3.35s Step time: 3.68s\n", - "567504 Examples seen. Accuracy:0.9167 Error: 0.14150 Loss:0.28891 Threads: 8 Forward time: 4.92s Backward time: 3.34s Step time: 3.66s\n", - "568144 Examples seen. Accuracy:0.9153 Error: 0.20137 Loss:0.14648 Threads: 8 Forward time: 4.94s Backward time: 3.39s Step time: 3.68s\n", - "568784 Examples seen. Accuracy:0.9158 Error: 0.19455 Loss:0.15975 Threads: 8 Forward time: 4.93s Backward time: 3.37s Step time: 3.72s\n", - "569424 Examples seen. Accuracy:0.9149 Error: 0.32549 Loss:0.38222 Threads: 8 Forward time: 4.90s Backward time: 3.40s Step time: 3.66s\n", - "570064 Examples seen. Accuracy:0.9161 Error: 0.23548 Loss:0.22350 Threads: 8 Forward time: 4.94s Backward time: 3.42s Step time: 3.67s\n", - "570704 Examples seen. Accuracy:0.9177 Error: 0.16450 Loss:0.13579 Threads: 8 Forward time: 4.89s Backward time: 3.40s Step time: 3.68s\n", - "571344 Examples seen. Accuracy:0.9192 Error: 0.14622 Loss:0.11167 Threads: 8 Forward time: 4.90s Backward time: 3.38s Step time: 3.66s\n", - "571984 Examples seen. Accuracy:0.9217 Error: 0.25907 Loss:0.35041 Threads: 8 Forward time: 4.97s Backward time: 3.37s Step time: 3.68s\n", - "572624 Examples seen. Accuracy:0.9191 Error: 0.20682 Loss:0.21008 Threads: 8 Forward time: 4.94s Backward time: 3.43s Step time: 3.69s\n", - "573264 Examples seen. Accuracy:0.9191 Error: 0.15297 Loss:0.14154 Threads: 8 Forward time: 5.01s Backward time: 3.40s Step time: 3.71s\n", - "573904 Examples seen. Accuracy:0.9205 Error: 0.19884 Loss:0.16167 Threads: 8 Forward time: 4.94s Backward time: 3.38s Step time: 3.70s\n", - "574544 Examples seen. Accuracy:0.9207 Error: 0.24615 Loss:0.21966 Threads: 8 Forward time: 5.02s Backward time: 3.41s Step time: 3.69s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "575184 Examples seen. 
Accuracy:0.9192 Error: 0.40491 Loss:0.56969 Threads: 8 Forward time: 4.98s Backward time: 3.43s Step time: 3.72s\n", - "575824 Examples seen. Accuracy:0.9192 Error: 0.27584 Loss:0.24388 Threads: 8 Forward time: 4.89s Backward time: 3.39s Step time: 3.67s\n", - "576464 Examples seen. Accuracy:0.9193 Error: 0.20617 Loss:0.18758 Threads: 8 Forward time: 4.93s Backward time: 3.39s Step time: 3.67s\n", - "577104 Examples seen. Accuracy:0.9192 Error: 0.21042 Loss:0.17100 Threads: 8 Forward time: 4.94s Backward time: 3.39s Step time: 3.66s\n", - "577744 Examples seen. Accuracy:0.9173 Error: 0.27497 Loss:0.29829 Threads: 8 Forward time: 4.96s Backward time: 3.37s Step time: 3.66s\n", - "578384 Examples seen. Accuracy:0.9188 Error: 0.16427 Loss:0.12574 Threads: 8 Forward time: 4.93s Backward time: 3.35s Step time: 3.65s\n", - "579024 Examples seen. Accuracy:0.9191 Error: 0.15256 Loss:0.13585 Threads: 8 Forward time: 4.91s Backward time: 3.37s Step time: 3.64s\n", - "579664 Examples seen. Accuracy:0.9182 Error: 0.21550 Loss:0.28664 Threads: 8 Forward time: 5.01s Backward time: 3.37s Step time: 3.71s\n", - "580304 Examples seen. Accuracy:0.9192 Error: 0.23448 Loss:0.22764 Threads: 8 Forward time: 5.02s Backward time: 3.38s Step time: 3.77s\n", - "580944 Examples seen. Accuracy:0.9195 Error: 0.16520 Loss:0.19715 Threads: 8 Forward time: 5.03s Backward time: 3.40s Step time: 3.72s\n", - "581584 Examples seen. Accuracy:0.9188 Error: 0.23176 Loss:0.23918 Threads: 8 Forward time: 4.93s Backward time: 3.36s Step time: 3.71s\n", - "582224 Examples seen. Accuracy:0.9188 Error: 0.27296 Loss:0.27981 Threads: 8 Forward time: 4.94s Backward time: 3.37s Step time: 3.67s\n", - "582864 Examples seen. Accuracy:0.9182 Error: 0.18141 Loss:0.16519 Threads: 8 Forward time: 4.95s Backward time: 3.33s Step time: 3.69s\n", - "583504 Examples seen. Accuracy:0.9176 Error: 0.18723 Loss:0.19415 Threads: 8 Forward time: 4.95s Backward time: 3.36s Step time: 3.68s\n", - "584144 Examples seen. 
Accuracy:0.9172 Error: 0.24776 Loss:0.29961 Threads: 8 Forward time: 4.97s Backward time: 3.43s Step time: 3.69s\n", - "584784 Examples seen. Accuracy:0.9171 Error: 0.24251 Loss:0.23034 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.68s\n", - "585424 Examples seen. Accuracy:0.9177 Error: 0.13297 Loss:0.13619 Threads: 8 Forward time: 4.97s Backward time: 3.39s Step time: 3.70s\n", - "586064 Examples seen. Accuracy:0.9183 Error: 0.20108 Loss:0.25198 Threads: 8 Forward time: 4.97s Backward time: 3.40s Step time: 3.69s\n", - "586704 Examples seen. Accuracy:0.9183 Error: 0.24632 Loss:0.18730 Threads: 8 Forward time: 4.98s Backward time: 3.38s Step time: 3.68s\n", - "587344 Examples seen. Accuracy:0.9184 Error: 0.20645 Loss:0.23060 Threads: 8 Forward time: 4.99s Backward time: 3.36s Step time: 3.68s\n", - "587984 Examples seen. Accuracy:0.9182 Error: 0.10230 Loss:0.06580 Threads: 8 Forward time: 4.98s Backward time: 3.39s Step time: 3.69s\n", - "588624 Examples seen. Accuracy:0.9196 Error: 0.16517 Loss:0.17498 Threads: 8 Forward time: 5.00s Backward time: 3.38s Step time: 3.69s\n", - "589264 Examples seen. Accuracy:0.9203 Error: 0.16410 Loss:0.18330 Threads: 8 Forward time: 5.01s Backward time: 3.39s Step time: 4.39s\n", - "589904 Examples seen. Accuracy:0.9213 Error: 0.27152 Loss:0.27096 Threads: 8 Forward time: 4.97s Backward time: 3.37s Step time: 3.73s\n", - "590544 Examples seen. Accuracy:0.9222 Error: 0.12742 Loss:0.11996 Threads: 8 Forward time: 4.93s Backward time: 3.36s Step time: 3.74s\n", - "591184 Examples seen. Accuracy:0.9221 Error: 0.22901 Loss:0.21949 Threads: 8 Forward time: 4.94s Backward time: 3.37s Step time: 3.76s\n", - "591824 Examples seen. Accuracy:0.9219 Error: 0.14309 Loss:0.10733 Threads: 8 Forward time: 4.93s Backward time: 3.38s Step time: 3.76s\n", - "592464 Examples seen. Accuracy:0.9195 Error: 0.30420 Loss:0.28920 Threads: 8 Forward time: 4.95s Backward time: 3.35s Step time: 3.73s\n", - "593104 Examples seen. 
Accuracy:0.9203 Error: 0.17870 Loss:0.15471 Threads: 8 Forward time: 5.00s Backward time: 3.37s Step time: 3.76s\n", - "593744 Examples seen. Accuracy:0.9186 Error: 0.29441 Loss:0.43107 Threads: 8 Forward time: 4.93s Backward time: 3.35s Step time: 3.69s\n", - "594384 Examples seen. Accuracy:0.9176 Error: 0.26896 Loss:0.24342 Threads: 8 Forward time: 5.00s Backward time: 3.38s Step time: 3.71s\n", - "595024 Examples seen. Accuracy:0.9183 Error: 0.14405 Loss:0.12767 Threads: 8 Forward time: 4.91s Backward time: 3.34s Step time: 3.69s\n", - "595664 Examples seen. Accuracy:0.9189 Error: 0.23333 Loss:0.31450 Threads: 8 Forward time: 4.93s Backward time: 3.35s Step time: 3.68s\n", - "596304 Examples seen. Accuracy:0.9183 Error: 0.26393 Loss:0.32661 Threads: 8 Forward time: 4.91s Backward time: 3.36s Step time: 3.69s\n", - "596944 Examples seen. Accuracy:0.9191 Error: 0.19612 Loss:0.14478 Threads: 8 Forward time: 5.00s Backward time: 3.33s Step time: 3.69s\n", - "597584 Examples seen. Accuracy:0.9217 Error: 0.16814 Loss:0.16207 Threads: 8 Forward time: 4.99s Backward time: 3.40s Step time: 3.70s\n", - "598224 Examples seen. Accuracy:0.9232 Error: 0.30825 Loss:0.31310 Threads: 8 Forward time: 4.91s Backward time: 3.39s Step time: 3.70s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 12 Examples seen:598848 Validation Accuracy: 0.9594 Validation Error: 0.1270 Validation Loss: 0.1242 Total time: 67.61min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 12. Working time: 1.13 hours.\n", - "599488 Examples seen. Accuracy:0.9242 Error: 0.10997 Loss:0.09514 Threads: 8 Forward time: 4.94s Backward time: 3.37s Step time: 3.77s\n", - "600128 Examples seen. Accuracy:0.9227 Error: 0.32213 Loss:0.41548 Threads: 8 Forward time: 4.92s Backward time: 3.37s Step time: 3.71s\n", - "600768 Examples seen. 
Accuracy:0.9228 Error: 0.21764 Loss:0.22738 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.69s\n", - "601408 Examples seen. Accuracy:0.9243 Error: 0.25922 Loss:0.25940 Threads: 8 Forward time: 4.98s Backward time: 3.39s Step time: 3.71s\n", - "602048 Examples seen. Accuracy:0.9238 Error: 0.30416 Loss:0.32473 Threads: 8 Forward time: 4.96s Backward time: 3.37s Step time: 3.71s\n", - "602688 Examples seen. Accuracy:0.9245 Error: 0.24544 Loss:0.25958 Threads: 8 Forward time: 4.97s Backward time: 3.39s Step time: 3.75s\n", - "603328 Examples seen. Accuracy:0.9231 Error: 0.17887 Loss:0.20883 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.70s\n", - "603968 Examples seen. Accuracy:0.9252 Error: 0.13774 Loss:0.14632 Threads: 8 Forward time: 4.95s Backward time: 3.33s Step time: 3.69s\n", - "604608 Examples seen. Accuracy:0.9265 Error: 0.30910 Loss:0.27277 Threads: 8 Forward time: 4.89s Backward time: 3.33s Step time: 3.68s\n", - "605248 Examples seen. Accuracy:0.9282 Error: 0.14430 Loss:0.12108 Threads: 8 Forward time: 5.02s Backward time: 3.37s Step time: 3.70s\n", - "605888 Examples seen. Accuracy:0.9287 Error: 0.19664 Loss:0.23849 Threads: 8 Forward time: 5.01s Backward time: 3.40s Step time: 3.74s\n", - "606528 Examples seen. Accuracy:0.9280 Error: 0.16660 Loss:0.15280 Threads: 8 Forward time: 4.99s Backward time: 3.38s Step time: 3.72s\n", - "607168 Examples seen. Accuracy:0.9265 Error: 0.22760 Loss:0.23802 Threads: 8 Forward time: 4.96s Backward time: 3.37s Step time: 3.71s\n", - "607808 Examples seen. Accuracy:0.9254 Error: 0.25173 Loss:0.22797 Threads: 8 Forward time: 4.96s Backward time: 3.35s Step time: 3.71s\n", - "608448 Examples seen. Accuracy:0.9248 Error: 0.11139 Loss:0.07290 Threads: 8 Forward time: 4.97s Backward time: 3.36s Step time: 3.70s\n", - "609088 Examples seen. Accuracy:0.9246 Error: 0.27117 Loss:0.27656 Threads: 8 Forward time: 4.95s Backward time: 3.36s Step time: 3.70s\n", - "609728 Examples seen. 
Accuracy:0.9260 Error: 0.08587 Loss:0.05641 Threads: 8 Forward time: 4.98s Backward time: 3.36s Step time: 3.70s\n", - "610368 Examples seen. Accuracy:0.9270 Error: 0.24453 Loss:0.32479 Threads: 8 Forward time: 5.01s Backward time: 3.40s Step time: 3.73s\n", - "611008 Examples seen. Accuracy:0.9248 Error: 0.32945 Loss:0.47009 Threads: 8 Forward time: 5.01s Backward time: 3.39s Step time: 3.73s\n", - "611648 Examples seen. Accuracy:0.9256 Error: 0.22456 Loss:0.23669 Threads: 8 Forward time: 5.05s Backward time: 3.38s Step time: 3.76s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "612288 Examples seen. Accuracy:0.9245 Error: 0.24345 Loss:0.28888 Threads: 8 Forward time: 4.99s Backward time: 3.38s Step time: 3.74s\n", - "612928 Examples seen. Accuracy:0.9232 Error: 0.33060 Loss:0.44335 Threads: 8 Forward time: 5.06s Backward time: 3.40s Step time: 3.73s\n", - "613568 Examples seen. Accuracy:0.9238 Error: 0.18371 Loss:0.17017 Threads: 8 Forward time: 5.08s Backward time: 3.37s Step time: 3.73s\n", - "614208 Examples seen. Accuracy:0.9242 Error: 0.14690 Loss:0.11882 Threads: 8 Forward time: 5.00s Backward time: 3.36s Step time: 4.40s\n", - "614848 Examples seen. Accuracy:0.9242 Error: 0.21024 Loss:0.28923 Threads: 8 Forward time: 4.97s Backward time: 3.35s Step time: 3.70s\n", - "615488 Examples seen. Accuracy:0.9242 Error: 0.13054 Loss:0.11231 Threads: 8 Forward time: 5.01s Backward time: 3.38s Step time: 3.80s\n", - "616128 Examples seen. Accuracy:0.9258 Error: 0.21791 Loss:0.17394 Threads: 8 Forward time: 5.03s Backward time: 3.42s Step time: 3.79s\n", - "616768 Examples seen. Accuracy:0.9258 Error: 0.22210 Loss:0.26650 Threads: 8 Forward time: 5.05s Backward time: 3.38s Step time: 3.75s\n", - "617408 Examples seen. Accuracy:0.9242 Error: 0.17192 Loss:0.14012 Threads: 8 Forward time: 4.93s Backward time: 3.37s Step time: 3.74s\n", - "618048 Examples seen. 
Accuracy:0.9225 Error: 0.25511 Loss:0.29246 Threads: 8 Forward time: 5.02s Backward time: 3.39s Step time: 3.73s\n", - "618688 Examples seen. Accuracy:0.9211 Error: 0.29894 Loss:0.30944 Threads: 8 Forward time: 4.99s Backward time: 3.36s Step time: 3.72s\n", - "619328 Examples seen. Accuracy:0.9197 Error: 0.16383 Loss:0.15550 Threads: 8 Forward time: 4.98s Backward time: 3.36s Step time: 3.72s\n", - "619968 Examples seen. Accuracy:0.9218 Error: 0.19566 Loss:0.19699 Threads: 8 Forward time: 5.02s Backward time: 3.31s Step time: 3.73s\n", - "620608 Examples seen. Accuracy:0.9223 Error: 0.20551 Loss:0.26143 Threads: 8 Forward time: 5.02s Backward time: 3.38s Step time: 3.80s\n", - "621248 Examples seen. Accuracy:0.9236 Error: 0.20055 Loss:0.17781 Threads: 8 Forward time: 4.96s Backward time: 3.39s Step time: 3.78s\n", - "621888 Examples seen. Accuracy:0.9234 Error: 0.22115 Loss:0.19203 Threads: 8 Forward time: 4.96s Backward time: 3.35s Step time: 3.73s\n", - "622528 Examples seen. Accuracy:0.9242 Error: 0.11351 Loss:0.18863 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.72s\n", - "623168 Examples seen. Accuracy:0.9247 Error: 0.15576 Loss:0.11963 Threads: 8 Forward time: 4.99s Backward time: 3.31s Step time: 3.73s\n", - "623808 Examples seen. Accuracy:0.9238 Error: 0.19279 Loss:0.20185 Threads: 8 Forward time: 4.99s Backward time: 3.33s Step time: 3.73s\n", - "624448 Examples seen. Accuracy:0.9236 Error: 0.17985 Loss:0.14372 Threads: 8 Forward time: 4.95s Backward time: 3.35s Step time: 3.72s\n", - "625088 Examples seen. Accuracy:0.9241 Error: 0.22430 Loss:0.15522 Threads: 8 Forward time: 5.01s Backward time: 3.33s Step time: 3.76s\n", - "625728 Examples seen. Accuracy:0.9241 Error: 0.26919 Loss:0.29446 Threads: 8 Forward time: 4.93s Backward time: 3.34s Step time: 3.71s\n", - "626368 Examples seen. Accuracy:0.9237 Error: 0.12659 Loss:0.08955 Threads: 8 Forward time: 5.01s Backward time: 3.38s Step time: 3.72s\n", - "627008 Examples seen. 
Accuracy:0.9231 Error: 0.27613 Loss:0.36906 Threads: 8 Forward time: 4.94s Backward time: 3.34s Step time: 3.70s\n", - "627648 Examples seen. Accuracy:0.9245 Error: 0.17259 Loss:0.13135 Threads: 8 Forward time: 5.00s Backward time: 3.34s Step time: 3.69s\n", - "628288 Examples seen. Accuracy:0.9232 Error: 0.28986 Loss:0.45926 Threads: 8 Forward time: 4.92s Backward time: 3.32s Step time: 3.70s\n", - "628928 Examples seen. Accuracy:0.9246 Error: 0.21001 Loss:0.15484 Threads: 8 Forward time: 4.95s Backward time: 3.31s Step time: 3.67s\n", - "629568 Examples seen. Accuracy:0.9248 Error: 0.10011 Loss:0.10467 Threads: 8 Forward time: 4.94s Backward time: 3.35s Step time: 3.73s\n", - "630208 Examples seen. Accuracy:0.9253 Error: 0.26053 Loss:0.30016 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.77s\n", - "630848 Examples seen. Accuracy:0.9247 Error: 0.13562 Loss:0.09931 Threads: 8 Forward time: 5.02s Backward time: 3.33s Step time: 3.81s\n", - "631488 Examples seen. Accuracy:0.9234 Error: 0.29311 Loss:0.41956 Threads: 8 Forward time: 5.00s Backward time: 3.35s Step time: 3.77s\n", - "632128 Examples seen. Accuracy:0.9227 Error: 0.32604 Loss:0.32537 Threads: 8 Forward time: 4.98s Backward time: 3.32s Step time: 3.68s\n", - "632768 Examples seen. Accuracy:0.9234 Error: 0.21469 Loss:0.17809 Threads: 8 Forward time: 4.94s Backward time: 3.34s Step time: 3.71s\n", - "633408 Examples seen. Accuracy:0.9238 Error: 0.19012 Loss:0.20322 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.68s\n", - "634048 Examples seen. Accuracy:0.9231 Error: 0.23476 Loss:0.24449 Threads: 8 Forward time: 5.02s Backward time: 3.39s Step time: 3.73s\n", - "634688 Examples seen. Accuracy:0.9230 Error: 0.24315 Loss:0.22827 Threads: 8 Forward time: 5.03s Backward time: 3.34s Step time: 3.70s\n", - "635328 Examples seen. Accuracy:0.9228 Error: 0.17972 Loss:0.12513 Threads: 8 Forward time: 4.98s Backward time: 3.34s Step time: 3.73s\n", - "635968 Examples seen. 
Accuracy:0.9235 Error: 0.24447 Loss:0.23741 Threads: 8 Forward time: 4.97s Backward time: 3.33s Step time: 3.68s\n", - "636608 Examples seen. Accuracy:0.9243 Error: 0.17920 Loss:0.14483 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.74s\n", - "637248 Examples seen. Accuracy:0.9234 Error: 0.17000 Loss:0.12153 Threads: 8 Forward time: 4.98s Backward time: 3.32s Step time: 3.72s\n", - "637888 Examples seen. Accuracy:0.9207 Error: 0.25757 Loss:0.25668 Threads: 8 Forward time: 5.03s Backward time: 3.32s Step time: 3.81s\n", - "638528 Examples seen. Accuracy:0.9202 Error: 0.24584 Loss:0.42478 Threads: 8 Forward time: 4.99s Backward time: 3.32s Step time: 3.71s\n", - "639168 Examples seen. Accuracy:0.9214 Error: 0.23140 Loss:0.20725 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.70s\n", - "639808 Examples seen. Accuracy:0.9216 Error: 0.21041 Loss:0.24327 Threads: 8 Forward time: 5.00s Backward time: 3.34s Step time: 3.68s\n", - "640448 Examples seen. Accuracy:0.9233 Error: 0.20852 Loss:0.23347 Threads: 8 Forward time: 4.97s Backward time: 3.35s Step time: 3.69s\n", - "641088 Examples seen. Accuracy:0.9224 Error: 0.28905 Loss:0.25250 Threads: 8 Forward time: 4.99s Backward time: 3.34s Step time: 3.71s\n", - "641728 Examples seen. Accuracy:0.9209 Error: 0.24816 Loss:0.28803 Threads: 8 Forward time: 4.98s Backward time: 3.35s Step time: 3.73s\n", - "642368 Examples seen. Accuracy:0.9201 Error: 0.27818 Loss:0.25208 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.71s\n", - "643008 Examples seen. Accuracy:0.9187 Error: 0.29237 Loss:0.25005 Threads: 8 Forward time: 5.21s Backward time: 3.44s Step time: 3.72s\n", - "643648 Examples seen. Accuracy:0.9206 Error: 0.24957 Loss:0.23271 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 4.29s\n", - "644288 Examples seen. Accuracy:0.9192 Error: 0.31494 Loss:0.37084 Threads: 8 Forward time: 4.93s Backward time: 3.30s Step time: 3.69s\n", - "644928 Examples seen. 
Accuracy:0.9205 Error: 0.18692 Loss:0.13481 Threads: 8 Forward time: 4.99s Backward time: 3.34s Step time: 3.70s\n", - "645568 Examples seen. Accuracy:0.9198 Error: 0.28008 Loss:0.31609 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.74s\n", - "646208 Examples seen. Accuracy:0.9192 Error: 0.27794 Loss:0.32324 Threads: 8 Forward time: 4.98s Backward time: 3.36s Step time: 3.72s\n", - "646848 Examples seen. Accuracy:0.9176 Error: 0.25645 Loss:0.19357 Threads: 8 Forward time: 4.93s Backward time: 3.35s Step time: 3.69s\n", - "647488 Examples seen. Accuracy:0.9188 Error: 0.18331 Loss:0.19487 Threads: 8 Forward time: 4.91s Backward time: 3.32s Step time: 3.67s\n", - "648128 Examples seen. Accuracy:0.9190 Error: 0.14423 Loss:0.32105 Threads: 8 Forward time: 4.91s Backward time: 3.33s Step time: 3.68s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 13 Examples seen:648752 Validation Accuracy: 0.9612 Validation Error: 0.1139 Validation Loss: 0.1133 Total time: 72.94min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 13. Working time: 1.22 hours.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "649392 Examples seen. Accuracy:0.9175 Error: 0.26981 Loss:0.25934 Threads: 8 Forward time: 4.96s Backward time: 3.37s Step time: 3.76s\n", - "650032 Examples seen. Accuracy:0.9172 Error: 0.18490 Loss:0.21110 Threads: 8 Forward time: 4.92s Backward time: 3.33s Step time: 3.78s\n", - "650672 Examples seen. Accuracy:0.9186 Error: 0.17890 Loss:0.21293 Threads: 8 Forward time: 4.96s Backward time: 3.35s Step time: 3.69s\n", - "651312 Examples seen. Accuracy:0.9188 Error: 0.35319 Loss:0.40238 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.73s\n", - "651952 Examples seen. Accuracy:0.9205 Error: 0.22089 Loss:0.26061 Threads: 8 Forward time: 5.01s Backward time: 3.31s Step time: 3.72s\n", - "652592 Examples seen. 
Accuracy:0.9211 Error: 0.15760 Loss:0.12604 Threads: 8 Forward time: 5.04s Backward time: 3.34s Step time: 3.73s\n", - "653232 Examples seen. Accuracy:0.9209 Error: 0.27305 Loss:0.43929 Threads: 8 Forward time: 4.95s Backward time: 3.34s Step time: 3.71s\n", - "653872 Examples seen. Accuracy:0.9202 Error: 0.29223 Loss:0.26970 Threads: 8 Forward time: 4.95s Backward time: 3.33s Step time: 3.99s\n", - "654512 Examples seen. Accuracy:0.9196 Error: 0.29819 Loss:0.37527 Threads: 8 Forward time: 4.98s Backward time: 3.33s Step time: 4.01s\n", - "655152 Examples seen. Accuracy:0.9193 Error: 0.19897 Loss:0.19135 Threads: 8 Forward time: 4.92s Backward time: 3.31s Step time: 4.17s\n", - "655792 Examples seen. Accuracy:0.9210 Error: 0.14533 Loss:0.10307 Threads: 8 Forward time: 4.97s Backward time: 3.33s Step time: 3.71s\n", - "656432 Examples seen. Accuracy:0.9221 Error: 0.19749 Loss:0.15923 Threads: 8 Forward time: 4.96s Backward time: 3.33s Step time: 3.72s\n", - "657072 Examples seen. Accuracy:0.9224 Error: 0.24243 Loss:0.29140 Threads: 8 Forward time: 4.93s Backward time: 3.34s Step time: 3.67s\n", - "657712 Examples seen. Accuracy:0.9245 Error: 0.15944 Loss:0.14549 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.66s\n", - "658352 Examples seen. Accuracy:0.9241 Error: 0.27938 Loss:0.34723 Threads: 8 Forward time: 4.93s Backward time: 3.31s Step time: 3.66s\n", - "658992 Examples seen. Accuracy:0.9238 Error: 0.21748 Loss:0.16661 Threads: 8 Forward time: 4.95s Backward time: 3.34s Step time: 3.68s\n", - "659632 Examples seen. Accuracy:0.9251 Error: 0.27197 Loss:0.31927 Threads: 8 Forward time: 5.04s Backward time: 3.36s Step time: 3.69s\n", - "660272 Examples seen. Accuracy:0.9233 Error: 0.31591 Loss:0.44622 Threads: 8 Forward time: 5.03s Backward time: 3.33s Step time: 3.73s\n", - "660912 Examples seen. Accuracy:0.9225 Error: 0.27785 Loss:0.32595 Threads: 8 Forward time: 4.96s Backward time: 3.33s Step time: 3.71s\n", - "661552 Examples seen. 
Accuracy:0.9228 Error: 0.17853 Loss:0.20782 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.73s\n", - "662192 Examples seen. Accuracy:0.9245 Error: 0.24496 Loss:0.24084 Threads: 8 Forward time: 4.95s Backward time: 3.31s Step time: 3.66s\n", - "662832 Examples seen. Accuracy:0.9246 Error: 0.23309 Loss:0.17473 Threads: 8 Forward time: 4.99s Backward time: 3.35s Step time: 3.66s\n", - "663472 Examples seen. Accuracy:0.9258 Error: 0.16811 Loss:0.13569 Threads: 8 Forward time: 5.09s Backward time: 3.36s Step time: 3.66s\n", - "664112 Examples seen. Accuracy:0.9248 Error: 0.28688 Loss:0.42061 Threads: 8 Forward time: 5.05s Backward time: 3.35s Step time: 3.72s\n", - "664752 Examples seen. Accuracy:0.9239 Error: 0.26790 Loss:0.27782 Threads: 8 Forward time: 4.98s Backward time: 3.32s Step time: 3.67s\n", - "665392 Examples seen. Accuracy:0.9231 Error: 0.28737 Loss:0.30998 Threads: 8 Forward time: 4.98s Backward time: 3.34s Step time: 3.66s\n", - "666032 Examples seen. Accuracy:0.9251 Error: 0.19994 Loss:0.14584 Threads: 8 Forward time: 4.99s Backward time: 3.36s Step time: 3.69s\n", - "666672 Examples seen. Accuracy:0.9242 Error: 0.15484 Loss:0.17790 Threads: 8 Forward time: 5.02s Backward time: 3.35s Step time: 3.70s\n", - "667312 Examples seen. Accuracy:0.9230 Error: 0.22248 Loss:0.26383 Threads: 8 Forward time: 5.03s Backward time: 3.33s Step time: 3.69s\n", - "667952 Examples seen. Accuracy:0.9235 Error: 0.17661 Loss:0.15604 Threads: 8 Forward time: 5.01s Backward time: 3.32s Step time: 3.69s\n", - "668592 Examples seen. Accuracy:0.9228 Error: 0.22582 Loss:0.19423 Threads: 8 Forward time: 5.00s Backward time: 3.34s Step time: 4.29s\n", - "669232 Examples seen. Accuracy:0.9230 Error: 0.19354 Loss:0.18550 Threads: 8 Forward time: 5.03s Backward time: 3.32s Step time: 3.74s\n", - "669872 Examples seen. Accuracy:0.9235 Error: 0.15994 Loss:0.11418 Threads: 8 Forward time: 4.93s Backward time: 3.34s Step time: 3.73s\n", - "670512 Examples seen. 
Accuracy:0.9242 Error: 0.21230 Loss:0.20893 Threads: 8 Forward time: 5.00s Backward time: 3.39s Step time: 3.72s\n", - "671152 Examples seen. Accuracy:0.9247 Error: 0.17924 Loss:0.18412 Threads: 8 Forward time: 4.93s Backward time: 3.36s Step time: 3.69s\n", - "671792 Examples seen. Accuracy:0.9246 Error: 0.30468 Loss:0.32864 Threads: 8 Forward time: 4.92s Backward time: 3.34s Step time: 3.74s\n", - "672432 Examples seen. Accuracy:0.9233 Error: 0.21060 Loss:0.21091 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.67s\n", - "673072 Examples seen. Accuracy:0.9224 Error: 0.19358 Loss:0.21268 Threads: 8 Forward time: 4.97s Backward time: 3.32s Step time: 3.68s\n", - "673712 Examples seen. Accuracy:0.9223 Error: 0.31135 Loss:0.29611 Threads: 8 Forward time: 4.95s Backward time: 3.31s Step time: 3.68s\n", - "674352 Examples seen. Accuracy:0.9224 Error: 0.23413 Loss:0.25672 Threads: 8 Forward time: 4.95s Backward time: 3.34s Step time: 3.67s\n", - "674992 Examples seen. Accuracy:0.9249 Error: 0.18648 Loss:0.16315 Threads: 8 Forward time: 5.01s Backward time: 3.35s Step time: 3.73s\n", - "675632 Examples seen. Accuracy:0.9245 Error: 0.21367 Loss:0.20568 Threads: 8 Forward time: 4.96s Backward time: 3.36s Step time: 4.01s\n", - "676272 Examples seen. Accuracy:0.9255 Error: 0.15781 Loss:0.11221 Threads: 8 Forward time: 4.97s Backward time: 3.37s Step time: 3.72s\n", - "676912 Examples seen. Accuracy:0.9257 Error: 0.26989 Loss:0.30329 Threads: 8 Forward time: 4.96s Backward time: 3.33s Step time: 3.69s\n", - "677552 Examples seen. Accuracy:0.9249 Error: 0.24495 Loss:0.32354 Threads: 8 Forward time: 4.94s Backward time: 3.34s Step time: 3.69s\n", - "678192 Examples seen. Accuracy:0.9253 Error: 0.20756 Loss:0.21230 Threads: 8 Forward time: 4.94s Backward time: 3.37s Step time: 3.68s\n", - "678832 Examples seen. Accuracy:0.9262 Error: 0.15825 Loss:0.15859 Threads: 8 Forward time: 4.98s Backward time: 3.37s Step time: 3.69s\n", - "679472 Examples seen. 
Accuracy:0.9260 Error: 0.23991 Loss:0.22751 Threads: 8 Forward time: 4.90s Backward time: 3.36s Step time: 3.68s\n", - "680112 Examples seen. Accuracy:0.9262 Error: 0.17398 Loss:0.14430 Threads: 8 Forward time: 4.99s Backward time: 3.36s Step time: 3.68s\n", - "680752 Examples seen. Accuracy:0.9251 Error: 0.27289 Loss:0.30962 Threads: 8 Forward time: 5.00s Backward time: 3.35s Step time: 3.69s\n", - "681392 Examples seen. Accuracy:0.9256 Error: 0.18921 Loss:0.24007 Threads: 8 Forward time: 4.93s Backward time: 3.33s Step time: 3.69s\n", - "682032 Examples seen. Accuracy:0.9251 Error: 0.17527 Loss:0.15538 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.74s\n", - "682672 Examples seen. Accuracy:0.9246 Error: 0.21379 Loss:0.34513 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.68s\n", - "683312 Examples seen. Accuracy:0.9218 Error: 0.19323 Loss:0.14895 Threads: 8 Forward time: 5.00s Backward time: 3.36s Step time: 3.69s\n", - "683952 Examples seen. Accuracy:0.9201 Error: 0.13746 Loss:0.16388 Threads: 8 Forward time: 4.95s Backward time: 3.34s Step time: 3.69s\n", - "684592 Examples seen. Accuracy:0.9201 Error: 0.14745 Loss:0.11056 Threads: 8 Forward time: 4.94s Backward time: 3.33s Step time: 3.69s\n", - "685232 Examples seen. Accuracy:0.9212 Error: 0.09946 Loss:0.07089 Threads: 8 Forward time: 4.94s Backward time: 3.32s Step time: 3.69s\n", - "685872 Examples seen. Accuracy:0.9194 Error: 0.15444 Loss:0.18586 Threads: 8 Forward time: 5.02s Backward time: 3.36s Step time: 3.77s\n", - "686512 Examples seen. Accuracy:0.9198 Error: 0.22131 Loss:0.22950 Threads: 8 Forward time: 4.99s Backward time: 3.35s Step time: 3.77s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "687152 Examples seen. Accuracy:0.9197 Error: 0.20431 Loss:0.20188 Threads: 8 Forward time: 5.01s Backward time: 3.33s Step time: 3.74s\n", - "687792 Examples seen. 
Accuracy:0.9197 Error: 0.20788 Loss:0.18897 Threads: 8 Forward time: 4.99s Backward time: 3.36s Step time: 3.80s\n", - "688432 Examples seen. Accuracy:0.9209 Error: 0.18472 Loss:0.17458 Threads: 8 Forward time: 5.01s Backward time: 3.33s Step time: 3.74s\n", - "689072 Examples seen. Accuracy:0.9209 Error: 0.19537 Loss:0.20380 Threads: 8 Forward time: 5.04s Backward time: 3.38s Step time: 3.73s\n", - "689712 Examples seen. Accuracy:0.9186 Error: 0.27032 Loss:0.33264 Threads: 8 Forward time: 5.02s Backward time: 3.39s Step time: 3.77s\n", - "690352 Examples seen. Accuracy:0.9178 Error: 0.28204 Loss:0.29697 Threads: 8 Forward time: 5.00s Backward time: 3.39s Step time: 3.77s\n", - "690992 Examples seen. Accuracy:0.9181 Error: 0.20187 Loss:0.18912 Threads: 8 Forward time: 5.02s Backward time: 3.38s Step time: 3.74s\n", - "691632 Examples seen. Accuracy:0.9179 Error: 0.18593 Loss:0.18831 Threads: 8 Forward time: 4.99s Backward time: 3.33s Step time: 3.74s\n", - "692272 Examples seen. Accuracy:0.9188 Error: 0.20844 Loss:0.20645 Threads: 8 Forward time: 4.97s Backward time: 3.38s Step time: 3.74s\n", - "692912 Examples seen. Accuracy:0.9194 Error: 0.22447 Loss:0.23672 Threads: 8 Forward time: 4.98s Backward time: 3.37s Step time: 3.73s\n", - "693552 Examples seen. Accuracy:0.9203 Error: 0.22415 Loss:0.20642 Threads: 8 Forward time: 4.98s Backward time: 3.35s Step time: 3.80s\n", - "694192 Examples seen. Accuracy:0.9202 Error: 0.22189 Loss:0.19755 Threads: 8 Forward time: 5.05s Backward time: 3.36s Step time: 3.78s\n", - "694832 Examples seen. Accuracy:0.9209 Error: 0.10455 Loss:0.11997 Threads: 8 Forward time: 5.03s Backward time: 3.35s Step time: 3.76s\n", - "695472 Examples seen. Accuracy:0.9218 Error: 0.09665 Loss:0.07078 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.78s\n", - "696112 Examples seen. Accuracy:0.9229 Error: 0.18862 Loss:0.15474 Threads: 8 Forward time: 5.07s Backward time: 3.35s Step time: 3.76s\n", - "696752 Examples seen. 
Accuracy:0.9232 Error: 0.17772 Loss:0.14124 Threads: 8 Forward time: 5.03s Backward time: 3.33s Step time: 3.77s\n", - "697392 Examples seen. Accuracy:0.9257 Error: 0.23398 Loss:0.21990 Threads: 8 Forward time: 5.05s Backward time: 3.37s Step time: 3.80s\n", - "698032 Examples seen. Accuracy:0.9250 Error: 0.29578 Loss:0.31758 Threads: 8 Forward time: 5.04s Backward time: 3.35s Step time: 4.42s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 14 Examples seen:698656 Validation Accuracy: 0.9634 Validation Error: 0.1034 Validation Loss: 0.1017 Total time: 78.30min\n", - "Epoch time: 5.8 minutes. 100 epochs: 9.6 hours.\n", - "Epochs: 14. Working time: 1.3 hours.\n", - "699296 Examples seen. Accuracy:0.9235 Error: 0.22145 Loss:0.26296 Threads: 8 Forward time: 5.05s Backward time: 3.36s Step time: 3.76s\n", - "699936 Examples seen. Accuracy:0.9252 Error: 0.14014 Loss:0.09814 Threads: 8 Forward time: 5.01s Backward time: 3.39s Step time: 3.77s\n", - "700576 Examples seen. Accuracy:0.9256 Error: 0.30009 Loss:0.47461 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.76s\n", - "701216 Examples seen. Accuracy:0.9230 Error: 0.17862 Loss:0.15909 Threads: 8 Forward time: 4.94s Backward time: 3.33s Step time: 3.73s\n", - "701856 Examples seen. Accuracy:0.9235 Error: 0.25658 Loss:0.28110 Threads: 8 Forward time: 5.02s Backward time: 3.36s Step time: 3.74s\n", - "702496 Examples seen. Accuracy:0.9246 Error: 0.16752 Loss:0.24733 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.72s\n", - "703136 Examples seen. Accuracy:0.9254 Error: 0.23252 Loss:0.30819 Threads: 8 Forward time: 4.92s Backward time: 3.30s Step time: 3.71s\n", - "703776 Examples seen. Accuracy:0.9254 Error: 0.23237 Loss:0.21500 Threads: 8 Forward time: 4.99s Backward time: 3.32s Step time: 3.71s\n", - "704416 Examples seen. 
Accuracy:0.9260 Error: 0.20399 Loss:0.17635 Threads: 8 Forward time: 4.96s Backward time: 3.33s Step time: 3.75s\n", - "705056 Examples seen. Accuracy:0.9262 Error: 0.19453 Loss:0.22488 Threads: 8 Forward time: 5.00s Backward time: 3.33s Step time: 3.74s\n", - "705696 Examples seen. Accuracy:0.9277 Error: 0.14425 Loss:0.09093 Threads: 8 Forward time: 4.98s Backward time: 3.33s Step time: 3.74s\n", - "706336 Examples seen. Accuracy:0.9288 Error: 0.31189 Loss:0.32594 Threads: 8 Forward time: 4.94s Backward time: 3.31s Step time: 3.72s\n", - "706976 Examples seen. Accuracy:0.9285 Error: 0.13149 Loss:0.09739 Threads: 8 Forward time: 4.94s Backward time: 3.31s Step time: 3.70s\n", - "707616 Examples seen. Accuracy:0.9284 Error: 0.14378 Loss:0.21577 Threads: 8 Forward time: 4.92s Backward time: 3.35s Step time: 3.69s\n", - "708256 Examples seen. Accuracy:0.9282 Error: 0.17707 Loss:0.11789 Threads: 8 Forward time: 4.92s Backward time: 3.31s Step time: 3.70s\n", - "708896 Examples seen. Accuracy:0.9289 Error: 0.20129 Loss:0.22809 Threads: 8 Forward time: 4.98s Backward time: 3.35s Step time: 3.67s\n", - "709536 Examples seen. Accuracy:0.9288 Error: 0.12164 Loss:0.08824 Threads: 8 Forward time: 4.94s Backward time: 3.35s Step time: 3.67s\n", - "710176 Examples seen. Accuracy:0.9297 Error: 0.24316 Loss:0.26564 Threads: 8 Forward time: 4.93s Backward time: 3.36s Step time: 3.68s\n", - "710816 Examples seen. Accuracy:0.9283 Error: 0.20906 Loss:0.17778 Threads: 8 Forward time: 5.00s Backward time: 3.37s Step time: 3.71s\n", - "711456 Examples seen. Accuracy:0.9291 Error: 0.18636 Loss:0.14885 Threads: 8 Forward time: 5.00s Backward time: 3.38s Step time: 3.73s\n", - "712096 Examples seen. Accuracy:0.9277 Error: 0.14190 Loss:0.16963 Threads: 8 Forward time: 4.99s Backward time: 3.34s Step time: 3.75s\n", - "712736 Examples seen. Accuracy:0.9278 Error: 0.14451 Loss:0.15218 Threads: 8 Forward time: 5.01s Backward time: 3.38s Step time: 3.73s\n", - "713376 Examples seen. 
Accuracy:0.9261 Error: 0.16294 Loss:0.17784 Threads: 8 Forward time: 4.93s Backward time: 3.33s Step time: 3.71s\n", - "714016 Examples seen. Accuracy:0.9250 Error: 0.23963 Loss:0.24820 Threads: 8 Forward time: 4.92s Backward time: 3.31s Step time: 3.67s\n", - "714656 Examples seen. Accuracy:0.9250 Error: 0.11989 Loss:0.08585 Threads: 8 Forward time: 4.90s Backward time: 3.31s Step time: 3.68s\n", - "715296 Examples seen. Accuracy:0.9252 Error: 0.12969 Loss:0.12236 Threads: 8 Forward time: 4.96s Backward time: 3.34s Step time: 3.67s\n", - "715936 Examples seen. Accuracy:0.9257 Error: 0.12528 Loss:0.11375 Threads: 8 Forward time: 4.92s Backward time: 3.33s Step time: 3.67s\n", - "716576 Examples seen. Accuracy:0.9253 Error: 0.18179 Loss:0.36416 Threads: 8 Forward time: 4.98s Backward time: 3.33s Step time: 3.68s\n", - "717216 Examples seen. Accuracy:0.9264 Error: 0.21959 Loss:0.21867 Threads: 8 Forward time: 5.09s Backward time: 3.38s Step time: 3.70s\n", - "717856 Examples seen. Accuracy:0.9278 Error: 0.18928 Loss:0.13191 Threads: 8 Forward time: 4.98s Backward time: 3.33s Step time: 3.71s\n", - "718496 Examples seen. Accuracy:0.9263 Error: 0.22639 Loss:0.24938 Threads: 8 Forward time: 5.01s Backward time: 3.33s Step time: 3.69s\n", - "719136 Examples seen. Accuracy:0.9265 Error: 0.28791 Loss:0.29771 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.69s\n", - "719776 Examples seen. Accuracy:0.9274 Error: 0.24713 Loss:0.32485 Threads: 8 Forward time: 5.00s Backward time: 3.36s Step time: 3.70s\n", - "720416 Examples seen. Accuracy:0.9276 Error: 0.18207 Loss:0.17420 Threads: 8 Forward time: 4.92s Backward time: 3.35s Step time: 3.68s\n", - "721056 Examples seen. Accuracy:0.9286 Error: 0.20232 Loss:0.16635 Threads: 8 Forward time: 5.10s Backward time: 3.41s Step time: 3.81s\n", - "721696 Examples seen. Accuracy:0.9303 Error: 0.18548 Loss:0.18271 Threads: 8 Forward time: 5.10s Backward time: 3.36s Step time: 3.72s\n", - "722336 Examples seen. 
Accuracy:0.9307 Error: 0.18014 Loss:0.17023 Threads: 8 Forward time: 4.99s Backward time: 3.34s Step time: 3.72s\n", - "722976 Examples seen. Accuracy:0.9304 Error: 0.25271 Loss:0.26551 Threads: 8 Forward time: 4.98s Backward time: 3.35s Step time: 3.71s\n", - "723616 Examples seen. Accuracy:0.9295 Error: 0.15825 Loss:0.16284 Threads: 8 Forward time: 4.96s Backward time: 3.32s Step time: 3.70s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "724256 Examples seen. Accuracy:0.9309 Error: 0.20303 Loss:0.22516 Threads: 8 Forward time: 5.01s Backward time: 3.33s Step time: 3.71s\n", - "724896 Examples seen. Accuracy:0.9299 Error: 0.15713 Loss:0.13265 Threads: 8 Forward time: 5.06s Backward time: 3.32s Step time: 3.70s\n", - "725536 Examples seen. Accuracy:0.9316 Error: 0.11935 Loss:0.11442 Threads: 8 Forward time: 4.99s Backward time: 3.32s Step time: 4.36s\n", - "726176 Examples seen. Accuracy:0.9320 Error: 0.06989 Loss:0.04916 Threads: 8 Forward time: 4.97s Backward time: 3.34s Step time: 4.04s\n", - "726816 Examples seen. Accuracy:0.9312 Error: 0.24818 Loss:0.29031 Threads: 8 Forward time: 5.00s Backward time: 3.35s Step time: 3.78s\n", - "727456 Examples seen. Accuracy:0.9311 Error: 0.23920 Loss:0.30601 Threads: 8 Forward time: 4.95s Backward time: 3.29s Step time: 3.82s\n", - "728096 Examples seen. Accuracy:0.9302 Error: 0.23040 Loss:0.21368 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.69s\n", - "728736 Examples seen. Accuracy:0.9275 Error: 0.28287 Loss:0.27504 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.67s\n", - "729376 Examples seen. Accuracy:0.9271 Error: 0.26658 Loss:0.32178 Threads: 8 Forward time: 4.93s Backward time: 3.33s Step time: 3.67s\n", - "730016 Examples seen. Accuracy:0.9272 Error: 0.05861 Loss:0.04194 Threads: 8 Forward time: 4.91s Backward time: 3.33s Step time: 3.68s\n", - "730656 Examples seen. 
Accuracy:0.9296 Error: 0.12672 Loss:0.08934 Threads: 8 Forward time: 5.04s Backward time: 3.35s Step time: 3.68s\n", - "731296 Examples seen. Accuracy:0.9292 Error: 0.20098 Loss:0.24593 Threads: 8 Forward time: 5.00s Backward time: 3.37s Step time: 3.73s\n", - "731936 Examples seen. Accuracy:0.9301 Error: 0.18390 Loss:0.16926 Threads: 8 Forward time: 4.98s Backward time: 3.35s Step time: 3.67s\n", - "732576 Examples seen. Accuracy:0.9311 Error: 0.18309 Loss:0.17629 Threads: 8 Forward time: 5.09s Backward time: 3.45s Step time: 3.72s\n", - "733216 Examples seen. Accuracy:0.9304 Error: 0.11153 Loss:0.09986 Threads: 8 Forward time: 4.95s Backward time: 3.37s Step time: 3.71s\n", - "733856 Examples seen. Accuracy:0.9311 Error: 0.18930 Loss:0.19622 Threads: 8 Forward time: 4.99s Backward time: 3.35s Step time: 3.69s\n", - "734496 Examples seen. Accuracy:0.9290 Error: 0.15428 Loss:0.22344 Threads: 8 Forward time: 4.98s Backward time: 3.35s Step time: 3.67s\n", - "735136 Examples seen. Accuracy:0.9263 Error: 0.17778 Loss:0.12305 Threads: 8 Forward time: 4.94s Backward time: 3.35s Step time: 3.71s\n", - "735776 Examples seen. Accuracy:0.9274 Error: 0.15163 Loss:0.11392 Threads: 8 Forward time: 4.99s Backward time: 3.34s Step time: 3.69s\n", - "736416 Examples seen. Accuracy:0.9273 Error: 0.16412 Loss:0.18380 Threads: 8 Forward time: 4.93s Backward time: 3.31s Step time: 3.68s\n", - "737056 Examples seen. Accuracy:0.9268 Error: 0.30519 Loss:0.31991 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.66s\n", - "737696 Examples seen. Accuracy:0.9262 Error: 0.21756 Loss:0.31419 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.67s\n", - "738336 Examples seen. Accuracy:0.9256 Error: 0.25406 Loss:0.24444 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.66s\n", - "738976 Examples seen. Accuracy:0.9270 Error: 0.11830 Loss:0.10572 Threads: 8 Forward time: 4.89s Backward time: 3.32s Step time: 3.66s\n", - "739616 Examples seen. 
Accuracy:0.9280 Error: 0.30708 Loss:0.32502 Threads: 8 Forward time: 4.93s Backward time: 3.31s Step time: 3.66s\n", - "740256 Examples seen. Accuracy:0.9275 Error: 0.19924 Loss:0.30101 Threads: 8 Forward time: 4.91s Backward time: 3.29s Step time: 3.66s\n", - "740896 Examples seen. Accuracy:0.9268 Error: 0.17401 Loss:0.25244 Threads: 8 Forward time: 4.92s Backward time: 3.35s Step time: 3.66s\n", - "741536 Examples seen. Accuracy:0.9267 Error: 0.24023 Loss:0.23901 Threads: 8 Forward time: 4.96s Backward time: 3.42s Step time: 3.70s\n", - "742176 Examples seen. Accuracy:0.9269 Error: 0.24492 Loss:0.21680 Threads: 8 Forward time: 4.93s Backward time: 3.36s Step time: 3.69s\n", - "742816 Examples seen. Accuracy:0.9278 Error: 0.22954 Loss:0.20757 Threads: 8 Forward time: 4.94s Backward time: 3.33s Step time: 3.67s\n", - "743456 Examples seen. Accuracy:0.9256 Error: 0.34231 Loss:0.50278 Threads: 8 Forward time: 4.90s Backward time: 3.31s Step time: 3.70s\n", - "744096 Examples seen. Accuracy:0.9246 Error: 0.36100 Loss:0.41389 Threads: 8 Forward time: 4.92s Backward time: 3.34s Step time: 3.73s\n", - "744736 Examples seen. Accuracy:0.9255 Error: 0.16333 Loss:0.11708 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.65s\n", - "745376 Examples seen. Accuracy:0.9258 Error: 0.21078 Loss:0.25444 Threads: 8 Forward time: 4.92s Backward time: 3.32s Step time: 3.64s\n", - "746016 Examples seen. Accuracy:0.9247 Error: 0.19289 Loss:0.15980 Threads: 8 Forward time: 4.91s Backward time: 3.32s Step time: 3.65s\n", - "746656 Examples seen. Accuracy:0.9250 Error: 0.19259 Loss:0.19599 Threads: 8 Forward time: 4.98s Backward time: 3.34s Step time: 3.68s\n", - "747296 Examples seen. Accuracy:0.9250 Error: 0.13941 Loss:0.15193 Threads: 8 Forward time: 5.03s Backward time: 3.34s Step time: 3.71s\n", - "747936 Examples seen. 
Accuracy:0.9250 Error: 0.16013 Loss:0.18090 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.67s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 15 Examples seen:748560 Validation Accuracy: 0.9652 Validation Error: 0.0958 Validation Loss: 0.0955 Total time: 83.60min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 15. Working time: 1.39 hours.\n", - "749200 Examples seen. Accuracy:0.9228 Error: 0.15590 Loss:0.13903 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.74s\n", - "749840 Examples seen. Accuracy:0.9228 Error: 0.14768 Loss:0.13154 Threads: 8 Forward time: 5.01s Backward time: 3.33s Step time: 3.70s\n", - "750480 Examples seen. Accuracy:0.9237 Error: 0.13735 Loss:0.12060 Threads: 8 Forward time: 4.98s Backward time: 3.34s Step time: 4.28s\n", - "751120 Examples seen. Accuracy:0.9237 Error: 0.29119 Loss:0.40009 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.69s\n", - "751760 Examples seen. Accuracy:0.9236 Error: 0.13724 Loss:0.08911 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.68s\n", - "752400 Examples seen. Accuracy:0.9234 Error: 0.23360 Loss:0.19725 Threads: 8 Forward time: 4.92s Backward time: 3.31s Step time: 3.67s\n", - "753040 Examples seen. Accuracy:0.9242 Error: 0.20335 Loss:0.17017 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.68s\n", - "753680 Examples seen. Accuracy:0.9253 Error: 0.24722 Loss:0.24969 Threads: 8 Forward time: 4.96s Backward time: 3.35s Step time: 3.70s\n", - "754320 Examples seen. Accuracy:0.9272 Error: 0.19458 Loss:0.20635 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 3.70s\n", - "754960 Examples seen. Accuracy:0.9266 Error: 0.23492 Loss:0.27245 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.70s\n", - "755600 Examples seen. 
Accuracy:0.9267 Error: 0.16444 Loss:0.15543 Threads: 8 Forward time: 5.01s Backward time: 3.29s Step time: 3.66s\n", - "756240 Examples seen. Accuracy:0.9267 Error: 0.21158 Loss:0.14912 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.67s\n", - "756880 Examples seen. Accuracy:0.9262 Error: 0.18051 Loss:0.16552 Threads: 8 Forward time: 4.92s Backward time: 3.31s Step time: 3.67s\n", - "757520 Examples seen. Accuracy:0.9250 Error: 0.19389 Loss:0.17590 Threads: 8 Forward time: 4.93s Backward time: 3.31s Step time: 3.72s\n", - "758160 Examples seen. Accuracy:0.9236 Error: 0.21560 Loss:0.24763 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.65s\n", - "758800 Examples seen. Accuracy:0.9240 Error: 0.14095 Loss:0.14267 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.66s\n", - "759440 Examples seen. Accuracy:0.9258 Error: 0.17888 Loss:0.17523 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.67s\n", - "760080 Examples seen. Accuracy:0.9258 Error: 0.20145 Loss:0.17130 Threads: 8 Forward time: 4.90s Backward time: 3.33s Step time: 3.68s\n", - "760720 Examples seen. Accuracy:0.9253 Error: 0.28773 Loss:0.26773 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.66s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "761360 Examples seen. Accuracy:0.9254 Error: 0.18094 Loss:0.17660 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.66s\n", - "762000 Examples seen. Accuracy:0.9264 Error: 0.13423 Loss:0.16859 Threads: 8 Forward time: 4.90s Backward time: 3.29s Step time: 3.65s\n", - "762640 Examples seen. Accuracy:0.9269 Error: 0.24560 Loss:0.25528 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.67s\n", - "763280 Examples seen. Accuracy:0.9269 Error: 0.18335 Loss:0.16227 Threads: 8 Forward time: 4.92s Backward time: 3.30s Step time: 3.68s\n", - "763920 Examples seen. 
Accuracy:0.9265 Error: 0.24932 Loss:0.21984 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.67s\n", - "764560 Examples seen. Accuracy:0.9277 Error: 0.17412 Loss:0.19422 Threads: 8 Forward time: 4.99s Backward time: 3.33s Step time: 3.67s\n", - "765200 Examples seen. Accuracy:0.9281 Error: 0.28186 Loss:0.22473 Threads: 8 Forward time: 4.97s Backward time: 3.33s Step time: 3.70s\n", - "765840 Examples seen. Accuracy:0.9297 Error: 0.14800 Loss:0.21123 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.68s\n", - "766480 Examples seen. Accuracy:0.9285 Error: 0.21399 Loss:0.21713 Threads: 8 Forward time: 4.94s Backward time: 3.31s Step time: 3.69s\n", - "767120 Examples seen. Accuracy:0.9299 Error: 0.15176 Loss:0.12546 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.66s\n", - "767760 Examples seen. Accuracy:0.9289 Error: 0.16088 Loss:0.17257 Threads: 8 Forward time: 4.99s Backward time: 3.32s Step time: 3.66s\n", - "768400 Examples seen. Accuracy:0.9288 Error: 0.24400 Loss:0.30570 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.74s\n", - "769040 Examples seen. Accuracy:0.9278 Error: 0.23058 Loss:0.32038 Threads: 8 Forward time: 4.92s Backward time: 3.30s Step time: 3.65s\n", - "769680 Examples seen. Accuracy:0.9265 Error: 0.24857 Loss:0.20830 Threads: 8 Forward time: 4.92s Backward time: 3.30s Step time: 3.65s\n", - "770320 Examples seen. Accuracy:0.9256 Error: 0.14352 Loss:0.11134 Threads: 8 Forward time: 4.90s Backward time: 3.32s Step time: 3.66s\n", - "770960 Examples seen. Accuracy:0.9270 Error: 0.17809 Loss:0.21109 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.66s\n", - "771600 Examples seen. Accuracy:0.9254 Error: 0.24045 Loss:0.35792 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.69s\n", - "772240 Examples seen. Accuracy:0.9256 Error: 0.23819 Loss:0.31171 Threads: 8 Forward time: 5.04s Backward time: 3.28s Step time: 3.69s\n", - "772880 Examples seen. 
Accuracy:0.9265 Error: 0.22070 Loss:0.17204 Threads: 8 Forward time: 5.01s Backward time: 3.31s Step time: 3.73s\n", - "773520 Examples seen. Accuracy:0.9271 Error: 0.16275 Loss:0.13333 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.67s\n", - "774160 Examples seen. Accuracy:0.9261 Error: 0.19483 Loss:0.17396 Threads: 8 Forward time: 5.02s Backward time: 3.32s Step time: 3.69s\n", - "774800 Examples seen. Accuracy:0.9267 Error: 0.13429 Loss:0.12911 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.68s\n", - "775440 Examples seen. Accuracy:0.9267 Error: 0.21414 Loss:0.18982 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 4.31s\n", - "776080 Examples seen. Accuracy:0.9267 Error: 0.24838 Loss:0.20335 Threads: 8 Forward time: 5.19s Backward time: 3.44s Step time: 5.11s\n", - "776720 Examples seen. Accuracy:0.9280 Error: 0.17126 Loss:0.13963 Threads: 8 Forward time: 5.02s Backward time: 3.31s Step time: 4.20s\n", - "777360 Examples seen. Accuracy:0.9293 Error: 0.13795 Loss:0.13239 Threads: 8 Forward time: 5.03s Backward time: 3.30s Step time: 4.57s\n", - "778000 Examples seen. Accuracy:0.9295 Error: 0.17199 Loss:0.16685 Threads: 8 Forward time: 5.03s Backward time: 3.32s Step time: 4.66s\n", - "778640 Examples seen. Accuracy:0.9291 Error: 0.19896 Loss:0.17809 Threads: 8 Forward time: 4.99s Backward time: 3.31s Step time: 3.87s\n", - "779280 Examples seen. Accuracy:0.9286 Error: 0.17767 Loss:0.17971 Threads: 8 Forward time: 5.07s Backward time: 3.31s Step time: 3.69s\n", - "779920 Examples seen. Accuracy:0.9282 Error: 0.14948 Loss:0.12814 Threads: 8 Forward time: 4.99s Backward time: 3.30s Step time: 4.33s\n", - "780560 Examples seen. Accuracy:0.9277 Error: 0.14642 Loss:0.12847 Threads: 8 Forward time: 4.94s Backward time: 3.30s Step time: 3.71s\n", - "781200 Examples seen. Accuracy:0.9275 Error: 0.16637 Loss:0.16562 Threads: 8 Forward time: 4.99s Backward time: 3.32s Step time: 3.73s\n", - "781840 Examples seen. 
Accuracy:0.9289 Error: 0.18915 Loss:0.22348 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.71s\n", - "782480 Examples seen. Accuracy:0.9289 Error: 0.19786 Loss:0.19222 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.71s\n", - "783120 Examples seen. Accuracy:0.9288 Error: 0.23412 Loss:0.29708 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.70s\n", - "783760 Examples seen. Accuracy:0.9301 Error: 0.19054 Loss:0.18742 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.73s\n", - "784400 Examples seen. Accuracy:0.9283 Error: 0.36893 Loss:0.46569 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.75s\n", - "785040 Examples seen. Accuracy:0.9282 Error: 0.22870 Loss:0.23232 Threads: 8 Forward time: 5.03s Backward time: 3.31s Step time: 3.74s\n", - "785680 Examples seen. Accuracy:0.9287 Error: 0.15320 Loss:0.12290 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.74s\n", - "786320 Examples seen. Accuracy:0.9282 Error: 0.16937 Loss:0.20100 Threads: 8 Forward time: 4.97s Backward time: 3.32s Step time: 3.73s\n", - "786960 Examples seen. Accuracy:0.9266 Error: 0.27295 Loss:0.29034 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.74s\n", - "787600 Examples seen. Accuracy:0.9261 Error: 0.17976 Loss:0.17512 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.75s\n", - "788240 Examples seen. Accuracy:0.9260 Error: 0.07664 Loss:0.06292 Threads: 8 Forward time: 5.01s Backward time: 3.32s Step time: 3.77s\n", - "788880 Examples seen. Accuracy:0.9273 Error: 0.11200 Loss:0.08151 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.74s\n", - "789520 Examples seen. Accuracy:0.9271 Error: 0.16134 Loss:0.23983 Threads: 8 Forward time: 5.02s Backward time: 3.32s Step time: 3.73s\n", - "790160 Examples seen. Accuracy:0.9277 Error: 0.19006 Loss:0.15261 Threads: 8 Forward time: 5.01s Backward time: 3.32s Step time: 3.72s\n", - "790800 Examples seen. 
Accuracy:0.9309 Error: 0.19073 Loss:0.20922 Threads: 8 Forward time: 4.92s Backward time: 3.30s Step time: 3.71s\n", - "791440 Examples seen. Accuracy:0.9315 Error: 0.11079 Loss:0.11771 Threads: 8 Forward time: 4.95s Backward time: 3.31s Step time: 3.72s\n", - "792080 Examples seen. Accuracy:0.9315 Error: 0.19563 Loss:0.18784 Threads: 8 Forward time: 4.94s Backward time: 3.32s Step time: 3.70s\n", - "792720 Examples seen. Accuracy:0.9309 Error: 0.12118 Loss:0.07905 Threads: 8 Forward time: 4.93s Backward time: 3.33s Step time: 3.71s\n", - "793360 Examples seen. Accuracy:0.9299 Error: 0.26418 Loss:0.39082 Threads: 8 Forward time: 4.99s Backward time: 3.34s Step time: 3.72s\n", - "794000 Examples seen. Accuracy:0.9308 Error: 0.16303 Loss:0.13526 Threads: 8 Forward time: 4.95s Backward time: 3.33s Step time: 3.71s\n", - "794640 Examples seen. Accuracy:0.9294 Error: 0.19839 Loss:0.17040 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.71s\n", - "795280 Examples seen. Accuracy:0.9312 Error: 0.17635 Loss:0.20949 Threads: 8 Forward time: 5.01s Backward time: 3.37s Step time: 3.75s\n", - "795920 Examples seen. Accuracy:0.9312 Error: 0.17916 Loss:0.14813 Threads: 8 Forward time: 4.92s Backward time: 3.32s Step time: 3.72s\n", - "796560 Examples seen. Accuracy:0.9291 Error: 0.25460 Loss:0.22747 Threads: 8 Forward time: 4.95s Backward time: 3.28s Step time: 3.70s\n", - "797200 Examples seen. Accuracy:0.9289 Error: 0.22125 Loss:0.23003 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 3.71s\n", - "797840 Examples seen. Accuracy:0.9296 Error: 0.21544 Loss:0.20087 Threads: 8 Forward time: 5.04s Backward time: 3.36s Step time: 3.72s\n", - "Starting Validation.\n", - "VALIDATION RECORD! 
Saving NN at SimplePlantLeafDisease.nn\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epochs: 16 Examples seen:798464 Validation Accuracy: 0.9674 Validation Error: 0.0888 Validation Loss: 0.0881 Total time: 88.98min\n", - "Layer 0 Max Output: 1.266 Min Output: -2.000 TNNetInput 128,128,3 Times: 0.00s 0.00s\n", - "Layer 1 Neurons: 64 Max Weight: 0.359 Min Weight: -0.303 Max Output: 5.221 Min Output: -5.160 TNNetConvolutionLinear 66,66,64 Times: 8.52s 0.40s Parent:0\n", - "Layer 2 Max Output: 5.221 Min Output: -2.959 TNNetMaxPool 33,33,64 Times: 3.61s 0.06s Parent:1\n", - "Layer 3 Neurons: 1 Max Weight: 0.569 Min Weight: 0.270 Max Output: 8.702 Min Output: -5.675 TNNetMovingStdNormalization 33,33,64 Times: 0.27s 0.00s Parent:2\n", - "Layer 4 Neurons: 64 Max Weight: 0.265 Min Weight: -0.199 Max Output: 9.768 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.83s 0.51s Parent:3\n", - "Layer 5 Neurons: 64 Max Weight: 0.330 Min Weight: -0.325 Max Output: 11.329 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.82s 0.18s Parent:4\n", - "Layer 6 Max Output: 11.329 Min Output: 0.000 TNNetMaxPool 17,17,64 Times: 0.49s 0.02s Parent:5\n", - "Layer 7 Neurons: 64 Max Weight: 0.302 Min Weight: -0.237 Max Output: 6.427 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.44s 0.09s Parent:6\n", - "Layer 8 Neurons: 64 Max Weight: 0.240 Min Weight: -0.225 Max Output: 6.061 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.47s 0.09s Parent:7\n", - "Layer 9 Neurons: 64 Max Weight: 0.211 Min Weight: -0.201 Max Output: 7.872 Min Output: 0.000 TNNetConvolutionReLU 9,9,64 Times: 0.45s 0.02s Parent:8\n", - "Layer 10 Max Output: 7.872 Min Output: 0.000 TNNetDropout 9,9,64 Times: 0.00s 0.00s Parent:9\n", - "Layer 11 Max Output: 7.872 Min Output: 0.000 TNNetMaxPool 5,5,64 Times: 0.05s 0.00s Parent:10\n", - "Layer 12 Neurons: 39 Max Weight: 0.366 Min Weight: -0.331 Max Output: 22.108 Min Output: -13.015 TNNetFullConnectLinear 39,1,1 Times: 
0.04s 0.00s Parent:11\n", - "Layer 13 Max Output: 1.000 Min Output: 0.000 TNNetSoftMax 39,1,1 Times: 0.00s 0.00s Parent:12\n", - "Epoch time: 4.8 minutes. 100 epochs: 8.1 hours.\n", - "Epochs: 16. Working time: 1.48 hours.\n", - "799104 Examples seen. Accuracy:0.9279 Error: 0.28146 Loss:0.52051 Threads: 8 Forward time: 4.93s Backward time: 3.33s Step time: 3.69s\n", - "799744 Examples seen. Accuracy:0.9269 Error: 0.24391 Loss:0.27840 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.67s\n", - "800384 Examples seen. Accuracy:0.9273 Error: 0.25354 Loss:0.31023 Threads: 8 Forward time: 5.05s Backward time: 3.31s Step time: 3.70s\n", - "801024 Examples seen. Accuracy:0.9255 Error: 0.22366 Loss:0.31534 Threads: 8 Forward time: 4.97s Backward time: 3.32s Step time: 3.73s\n", - "801664 Examples seen. Accuracy:0.9235 Error: 0.24743 Loss:0.43041 Threads: 8 Forward time: 5.00s Backward time: 3.33s Step time: 3.69s\n", - "802304 Examples seen. Accuracy:0.9227 Error: 0.24070 Loss:0.21201 Threads: 8 Forward time: 4.96s Backward time: 3.34s Step time: 3.70s\n", - "802944 Examples seen. Accuracy:0.9229 Error: 0.31073 Loss:0.38355 Threads: 8 Forward time: 4.99s Backward time: 3.31s Step time: 3.70s\n", - "803584 Examples seen. Accuracy:0.9206 Error: 0.34427 Loss:0.45807 Threads: 8 Forward time: 5.03s Backward time: 3.33s Step time: 3.70s\n", - "804224 Examples seen. Accuracy:0.9221 Error: 0.14951 Loss:0.13564 Threads: 8 Forward time: 5.13s Backward time: 3.34s Step time: 4.28s\n", - "804864 Examples seen. Accuracy:0.9220 Error: 0.27017 Loss:0.26060 Threads: 8 Forward time: 4.95s Backward time: 3.34s Step time: 3.71s\n", - "805504 Examples seen. Accuracy:0.9228 Error: 0.14546 Loss:0.12689 Threads: 8 Forward time: 4.99s Backward time: 3.31s Step time: 3.68s\n", - "806144 Examples seen. Accuracy:0.9221 Error: 0.24039 Loss:0.26874 Threads: 8 Forward time: 4.93s Backward time: 3.31s Step time: 3.67s\n", - "806784 Examples seen. 
Accuracy:0.9251 Error: 0.11481 Loss:0.08027 Threads: 8 Forward time: 4.97s Backward time: 3.34s Step time: 3.67s\n", - "807424 Examples seen. Accuracy:0.9232 Error: 0.28140 Loss:0.32225 Threads: 8 Forward time: 4.93s Backward time: 3.34s Step time: 3.66s\n", - "808064 Examples seen. Accuracy:0.9228 Error: 0.21544 Loss:0.24749 Threads: 8 Forward time: 4.96s Backward time: 3.35s Step time: 3.67s\n", - "808704 Examples seen. Accuracy:0.9233 Error: 0.24415 Loss:0.22534 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.68s\n", - "809344 Examples seen. Accuracy:0.9236 Error: 0.18281 Loss:0.19059 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 3.67s\n", - "809984 Examples seen. Accuracy:0.9237 Error: 0.29003 Loss:0.30573 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.67s\n", - "810624 Examples seen. Accuracy:0.9254 Error: 0.16253 Loss:0.13091 Threads: 8 Forward time: 4.94s Backward time: 3.36s Step time: 3.69s\n", - "811264 Examples seen. Accuracy:0.9270 Error: 0.24708 Loss:0.22703 Threads: 8 Forward time: 4.96s Backward time: 3.36s Step time: 3.69s\n", - "811904 Examples seen. Accuracy:0.9233 Error: 0.18024 Loss:0.27344 Threads: 8 Forward time: 4.99s Backward time: 3.36s Step time: 3.69s\n", - "812544 Examples seen. Accuracy:0.9192 Error: 0.35274 Loss:0.37108 Threads: 8 Forward time: 5.02s Backward time: 3.31s Step time: 3.68s\n", - "813184 Examples seen. Accuracy:0.9198 Error: 0.16868 Loss:0.14000 Threads: 8 Forward time: 5.00s Backward time: 3.34s Step time: 3.69s\n", - "813824 Examples seen. Accuracy:0.9207 Error: 0.21982 Loss:0.17579 Threads: 8 Forward time: 5.01s Backward time: 3.29s Step time: 3.67s\n", - "814464 Examples seen. Accuracy:0.9218 Error: 0.09917 Loss:0.08106 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.71s\n", - "815104 Examples seen. Accuracy:0.9210 Error: 0.31732 Loss:0.45190 Threads: 8 Forward time: 4.95s Backward time: 3.30s Step time: 3.70s\n", - "815744 Examples seen. 
Accuracy:0.9233 Error: 0.15465 Loss:0.12987 Threads: 8 Forward time: 4.92s Backward time: 3.27s Step time: 3.66s\n", - "816384 Examples seen. Accuracy:0.9228 Error: 0.11111 Loss:0.08842 Threads: 8 Forward time: 4.94s Backward time: 3.30s Step time: 3.67s\n", - "817024 Examples seen. Accuracy:0.9233 Error: 0.33055 Loss:0.41229 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.66s\n", - "817664 Examples seen. Accuracy:0.9247 Error: 0.20235 Loss:0.14343 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.64s\n", - "818304 Examples seen. Accuracy:0.9260 Error: 0.10952 Loss:0.07429 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.65s\n", - "818944 Examples seen. Accuracy:0.9269 Error: 0.15545 Loss:0.11444 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.65s\n", - "819584 Examples seen. Accuracy:0.9286 Error: 0.18680 Loss:0.18544 Threads: 8 Forward time: 4.94s Backward time: 3.31s Step time: 3.65s\n", - "820224 Examples seen. Accuracy:0.9287 Error: 0.18136 Loss:0.21638 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.68s\n", - "820864 Examples seen. Accuracy:0.9300 Error: 0.16558 Loss:0.11525 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.67s\n", - "821504 Examples seen. Accuracy:0.9314 Error: 0.13345 Loss:0.16936 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.65s\n", - "822144 Examples seen. Accuracy:0.9321 Error: 0.25381 Loss:0.28719 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.66s\n", - "822784 Examples seen. Accuracy:0.9319 Error: 0.24331 Loss:0.35148 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.65s\n", - "823424 Examples seen. Accuracy:0.9299 Error: 0.40296 Loss:0.49650 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.66s\n", - "824064 Examples seen. Accuracy:0.9288 Error: 0.16747 Loss:0.11808 Threads: 8 Forward time: 4.91s Backward time: 3.31s Step time: 3.65s\n", - "824704 Examples seen. 
Accuracy:0.9283 Error: 0.18405 Loss:0.17831 Threads: 8 Forward time: 4.91s Backward time: 3.32s Step time: 3.65s\n", - "825344 Examples seen. Accuracy:0.9276 Error: 0.20185 Loss:0.21823 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.65s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "825984 Examples seen. Accuracy:0.9280 Error: 0.18283 Loss:0.18587 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.69s\n", - "826624 Examples seen. Accuracy:0.9286 Error: 0.19055 Loss:0.18149 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.67s\n", - "827264 Examples seen. Accuracy:0.9293 Error: 0.12547 Loss:0.08409 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 3.69s\n", - "827904 Examples seen. Accuracy:0.9289 Error: 0.17753 Loss:0.20191 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.66s\n", - "828544 Examples seen. Accuracy:0.9292 Error: 0.13984 Loss:0.10963 Threads: 8 Forward time: 5.04s Backward time: 3.30s Step time: 3.68s\n", - "829184 Examples seen. Accuracy:0.9292 Error: 0.18744 Loss:0.18920 Threads: 8 Forward time: 4.95s Backward time: 3.28s Step time: 3.71s\n", - "829824 Examples seen. Accuracy:0.9282 Error: 0.19414 Loss:0.16085 Threads: 8 Forward time: 4.92s Backward time: 3.27s Step time: 3.66s\n", - "830464 Examples seen. Accuracy:0.9283 Error: 0.05182 Loss:0.02891 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.66s\n", - "831104 Examples seen. Accuracy:0.9301 Error: 0.10136 Loss:0.06753 Threads: 8 Forward time: 4.98s Backward time: 3.34s Step time: 3.67s\n", - "831744 Examples seen. Accuracy:0.9312 Error: 0.15923 Loss:0.16901 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.68s\n", - "832384 Examples seen. Accuracy:0.9294 Error: 0.33872 Loss:0.42488 Threads: 8 Forward time: 4.96s Backward time: 3.33s Step time: 3.68s\n", - "833024 Examples seen. 
Accuracy:0.9276 Error: 0.26633 Loss:0.33095 Threads: 8 Forward time: 5.01s Backward time: 3.29s Step time: 3.68s\n", - "833664 Examples seen. Accuracy:0.9280 Error: 0.17941 Loss:0.13564 Threads: 8 Forward time: 5.06s Backward time: 3.26s Step time: 3.68s\n", - "834304 Examples seen. Accuracy:0.9288 Error: 0.20437 Loss:0.26723 Threads: 8 Forward time: 5.15s Backward time: 3.37s Step time: 4.28s\n", - "834944 Examples seen. Accuracy:0.9290 Error: 0.21144 Loss:0.22799 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.79s\n", - "835584 Examples seen. Accuracy:0.9275 Error: 0.15827 Loss:0.10988 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.72s\n", - "836224 Examples seen. Accuracy:0.9269 Error: 0.23111 Loss:0.27598 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.84s\n", - "836864 Examples seen. Accuracy:0.9282 Error: 0.16234 Loss:0.11793 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.74s\n", - "837504 Examples seen. Accuracy:0.9290 Error: 0.20848 Loss:0.19009 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.70s\n", - "838144 Examples seen. Accuracy:0.9293 Error: 0.11908 Loss:0.08998 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.70s\n", - "838784 Examples seen. Accuracy:0.9307 Error: 0.10253 Loss:0.06417 Threads: 8 Forward time: 5.01s Backward time: 3.31s Step time: 3.68s\n", - "839424 Examples seen. Accuracy:0.9309 Error: 0.08945 Loss:0.07415 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.76s\n", - "840064 Examples seen. Accuracy:0.9303 Error: 0.29540 Loss:0.26998 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.66s\n", - "840704 Examples seen. Accuracy:0.9303 Error: 0.16691 Loss:0.18135 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.65s\n", - "841344 Examples seen. Accuracy:0.9305 Error: 0.09373 Loss:0.07867 Threads: 8 Forward time: 4.93s Backward time: 3.27s Step time: 3.69s\n", - "841984 Examples seen. 
Accuracy:0.9319 Error: 0.16894 Loss:0.21846 Threads: 8 Forward time: 4.98s Backward time: 3.34s Step time: 3.70s\n", - "842624 Examples seen. Accuracy:0.9334 Error: 0.12858 Loss:0.14987 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.69s\n", - "843264 Examples seen. Accuracy:0.9344 Error: 0.15648 Loss:0.13730 Threads: 8 Forward time: 5.02s Backward time: 3.32s Step time: 3.69s\n", - "843904 Examples seen. Accuracy:0.9337 Error: 0.25002 Loss:0.25660 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.70s\n", - "844544 Examples seen. Accuracy:0.9346 Error: 0.08790 Loss:0.12666 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.67s\n", - "845184 Examples seen. Accuracy:0.9340 Error: 0.23597 Loss:0.31651 Threads: 8 Forward time: 4.90s Backward time: 3.30s Step time: 3.68s\n", - "845824 Examples seen. Accuracy:0.9342 Error: 0.13486 Loss:0.09496 Threads: 8 Forward time: 4.90s Backward time: 3.27s Step time: 3.61s\n", - "846464 Examples seen. Accuracy:0.9329 Error: 0.08714 Loss:0.14212 Threads: 8 Forward time: 4.91s Backward time: 3.26s Step time: 3.60s\n", - "847104 Examples seen. Accuracy:0.9330 Error: 0.23075 Loss:0.31477 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.62s\n", - "847744 Examples seen. Accuracy:0.9337 Error: 0.13689 Loss:0.18215 Threads: 8 Forward time: 4.99s Backward time: 3.33s Step time: 3.68s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 17 Examples seen:848368 Validation Accuracy: 0.9692 Validation Error: 0.0837 Validation Loss: 0.0860 Total time: 94.26min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 17. Working time: 1.57 hours.\n", - "849008 Examples seen. Accuracy:0.9315 Error: 0.20078 Loss:0.24331 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.69s\n", - "849648 Examples seen. 
Accuracy:0.9311 Error: 0.15992 Loss:0.14799 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.65s\n", - "850288 Examples seen. Accuracy:0.9318 Error: 0.22830 Loss:0.19558 Threads: 8 Forward time: 5.03s Backward time: 3.32s Step time: 3.69s\n", - "850928 Examples seen. Accuracy:0.9310 Error: 0.29294 Loss:0.32634 Threads: 8 Forward time: 5.02s Backward time: 3.29s Step time: 3.80s\n", - "851568 Examples seen. Accuracy:0.9301 Error: 0.23995 Loss:0.18865 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.66s\n", - "852208 Examples seen. Accuracy:0.9303 Error: 0.14758 Loss:0.12296 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.66s\n", - "852848 Examples seen. Accuracy:0.9311 Error: 0.14358 Loss:0.10642 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.65s\n", - "853488 Examples seen. Accuracy:0.9312 Error: 0.19626 Loss:0.19077 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.66s\n", - "854128 Examples seen. Accuracy:0.9325 Error: 0.11741 Loss:0.08145 Threads: 8 Forward time: 4.91s Backward time: 3.29s Step time: 3.66s\n", - "854768 Examples seen. Accuracy:0.9322 Error: 0.19403 Loss:0.16102 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.64s\n", - "855408 Examples seen. Accuracy:0.9318 Error: 0.17729 Loss:0.15791 Threads: 8 Forward time: 5.06s Backward time: 3.31s Step time: 3.67s\n", - "856048 Examples seen. Accuracy:0.9315 Error: 0.15296 Loss:0.15293 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.64s\n", - "856688 Examples seen. Accuracy:0.9334 Error: 0.10373 Loss:0.10627 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.65s\n", - "857328 Examples seen. Accuracy:0.9330 Error: 0.22403 Loss:0.28188 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.67s\n", - "857968 Examples seen. Accuracy:0.9328 Error: 0.33263 Loss:0.35806 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.65s\n", - "858608 Examples seen. 
Accuracy:0.9338 Error: 0.18441 Loss:0.17305 Threads: 8 Forward time: 4.99s Backward time: 3.31s Step time: 3.65s\n", - "859248 Examples seen. Accuracy:0.9343 Error: 0.13627 Loss:0.12101 Threads: 8 Forward time: 5.02s Backward time: 3.33s Step time: 4.29s\n", - "859888 Examples seen. Accuracy:0.9335 Error: 0.19746 Loss:0.21544 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.76s\n", - "860528 Examples seen. Accuracy:0.9315 Error: 0.14519 Loss:0.19130 Threads: 8 Forward time: 4.99s Backward time: 3.37s Step time: 3.78s\n", - "861168 Examples seen. Accuracy:0.9310 Error: 0.23647 Loss:0.23213 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.78s\n", - "861808 Examples seen. Accuracy:0.9338 Error: 0.12715 Loss:0.10385 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.70s\n", - "862448 Examples seen. Accuracy:0.9336 Error: 0.15821 Loss:0.13975 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.68s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "863088 Examples seen. Accuracy:0.9347 Error: 0.17543 Loss:0.12809 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.70s\n", - "863728 Examples seen. Accuracy:0.9347 Error: 0.21422 Loss:0.22431 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.71s\n", - "864368 Examples seen. Accuracy:0.9336 Error: 0.30127 Loss:0.35847 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.73s\n", - "865008 Examples seen. Accuracy:0.9342 Error: 0.12342 Loss:0.12423 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.73s\n", - "865648 Examples seen. Accuracy:0.9339 Error: 0.23570 Loss:0.18630 Threads: 8 Forward time: 4.95s Backward time: 3.28s Step time: 3.69s\n", - "866288 Examples seen. Accuracy:0.9329 Error: 0.16039 Loss:0.12395 Threads: 8 Forward time: 4.94s Backward time: 3.31s Step time: 3.70s\n", - "866928 Examples seen. 
Accuracy:0.9327 Error: 0.11549 Loss:0.15805 Threads: 8 Forward time: 4.94s Backward time: 3.30s Step time: 3.70s\n", - "867568 Examples seen. Accuracy:0.9314 Error: 0.31881 Loss:0.37359 Threads: 8 Forward time: 4.95s Backward time: 3.33s Step time: 3.71s\n", - "868208 Examples seen. Accuracy:0.9308 Error: 0.20159 Loss:0.15719 Threads: 8 Forward time: 4.95s Backward time: 3.31s Step time: 3.71s\n", - "868848 Examples seen. Accuracy:0.9322 Error: 0.17554 Loss:0.13623 Threads: 8 Forward time: 4.94s Backward time: 3.32s Step time: 3.70s\n", - "869488 Examples seen. Accuracy:0.9339 Error: 0.19699 Loss:0.16187 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.71s\n", - "870128 Examples seen. Accuracy:0.9344 Error: 0.20704 Loss:0.26117 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.78s\n", - "870768 Examples seen. Accuracy:0.9336 Error: 0.11151 Loss:0.09849 Threads: 8 Forward time: 4.95s Backward time: 3.33s Step time: 3.72s\n", - "871408 Examples seen. Accuracy:0.9324 Error: 0.15624 Loss:0.15004 Threads: 8 Forward time: 4.97s Backward time: 3.32s Step time: 3.78s\n", - "872048 Examples seen. Accuracy:0.9330 Error: 0.12650 Loss:0.16674 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.76s\n", - "872688 Examples seen. Accuracy:0.9340 Error: 0.20329 Loss:0.18442 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.73s\n", - "873328 Examples seen. Accuracy:0.9321 Error: 0.15509 Loss:0.10589 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.69s\n", - "873968 Examples seen. Accuracy:0.9326 Error: 0.17602 Loss:0.17291 Threads: 8 Forward time: 4.93s Backward time: 3.30s Step time: 3.70s\n", - "874608 Examples seen. Accuracy:0.9337 Error: 0.10286 Loss:0.07093 Threads: 8 Forward time: 4.95s Backward time: 3.29s Step time: 3.69s\n", - "875248 Examples seen. Accuracy:0.9341 Error: 0.12489 Loss:0.08589 Threads: 8 Forward time: 4.95s Backward time: 3.29s Step time: 3.70s\n", - "875888 Examples seen. 
Accuracy:0.9335 Error: 0.24831 Loss:0.27215 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.71s\n", - "876528 Examples seen. Accuracy:0.9344 Error: 0.14031 Loss:0.10177 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.67s\n", - "877168 Examples seen. Accuracy:0.9356 Error: 0.21846 Loss:0.29976 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.66s\n", - "877808 Examples seen. Accuracy:0.9346 Error: 0.17239 Loss:0.16957 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.64s\n", - "878448 Examples seen. Accuracy:0.9354 Error: 0.18697 Loss:0.12211 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.65s\n", - "879088 Examples seen. Accuracy:0.9355 Error: 0.05862 Loss:0.03569 Threads: 8 Forward time: 4.91s Backward time: 3.25s Step time: 3.64s\n", - "879728 Examples seen. Accuracy:0.9363 Error: 0.15455 Loss:0.14466 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.63s\n", - "880368 Examples seen. Accuracy:0.9351 Error: 0.18215 Loss:0.25896 Threads: 8 Forward time: 5.03s Backward time: 3.31s Step time: 3.67s\n", - "881008 Examples seen. Accuracy:0.9353 Error: 0.21088 Loss:0.18012 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.69s\n", - "881648 Examples seen. Accuracy:0.9351 Error: 0.23289 Loss:0.35030 Threads: 8 Forward time: 4.97s Backward time: 3.27s Step time: 3.67s\n", - "882288 Examples seen. Accuracy:0.9345 Error: 0.18064 Loss:0.13170 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.79s\n", - "882928 Examples seen. Accuracy:0.9328 Error: 0.18830 Loss:0.19651 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 3.68s\n", - "883568 Examples seen. Accuracy:0.9319 Error: 0.19475 Loss:0.17783 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.67s\n", - "884208 Examples seen. Accuracy:0.9315 Error: 0.22865 Loss:0.25211 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.71s\n", - "884848 Examples seen. 
Accuracy:0.9320 Error: 0.22454 Loss:0.34938 Threads: 8 Forward time: 5.02s Backward time: 3.29s Step time: 3.67s\n", - "885488 Examples seen. Accuracy:0.9325 Error: 0.13374 Loss:0.09170 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.69s\n", - "886128 Examples seen. Accuracy:0.9336 Error: 0.14062 Loss:0.09998 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.65s\n", - "886768 Examples seen. Accuracy:0.9343 Error: 0.12686 Loss:0.09611 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.67s\n", - "887408 Examples seen. Accuracy:0.9354 Error: 0.26648 Loss:0.27128 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.67s\n", - "888048 Examples seen. Accuracy:0.9345 Error: 0.15077 Loss:0.17112 Threads: 8 Forward time: 4.99s Backward time: 3.30s Step time: 3.69s\n", - "888688 Examples seen. Accuracy:0.9326 Error: 0.17508 Loss:0.24567 Threads: 8 Forward time: 5.20s Backward time: 3.41s Step time: 3.71s\n", - "889328 Examples seen. Accuracy:0.9334 Error: 0.15998 Loss:0.14963 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 4.31s\n", - "889968 Examples seen. Accuracy:0.9327 Error: 0.25797 Loss:0.22629 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.69s\n", - "890608 Examples seen. Accuracy:0.9350 Error: 0.16556 Loss:0.22346 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.68s\n", - "891248 Examples seen. Accuracy:0.9347 Error: 0.19274 Loss:0.17709 Threads: 8 Forward time: 4.95s Backward time: 3.28s Step time: 3.70s\n", - "891888 Examples seen. Accuracy:0.9336 Error: 0.18390 Loss:0.14382 Threads: 8 Forward time: 4.91s Backward time: 3.28s Step time: 3.74s\n", - "892528 Examples seen. Accuracy:0.9333 Error: 0.21437 Loss:0.23370 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.75s\n", - "893168 Examples seen. Accuracy:0.9339 Error: 0.15086 Loss:0.11176 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 3.71s\n", - "893808 Examples seen. 
Accuracy:0.9340 Error: 0.14431 Loss:0.11742 Threads: 8 Forward time: 4.95s Backward time: 3.35s Step time: 3.67s\n", - "894448 Examples seen. Accuracy:0.9342 Error: 0.15652 Loss:0.17883 Threads: 8 Forward time: 5.03s Backward time: 3.31s Step time: 3.75s\n", - "895088 Examples seen. Accuracy:0.9341 Error: 0.08559 Loss:0.08442 Threads: 8 Forward time: 4.93s Backward time: 3.30s Step time: 3.65s\n", - "895728 Examples seen. Accuracy:0.9348 Error: 0.15858 Loss:0.13670 Threads: 8 Forward time: 5.09s Backward time: 3.39s Step time: 3.79s\n", - "896368 Examples seen. Accuracy:0.9354 Error: 0.13787 Loss:0.16702 Threads: 8 Forward time: 4.94s Backward time: 3.32s Step time: 3.66s\n", - "897008 Examples seen. Accuracy:0.9351 Error: 0.19693 Loss:0.16949 Threads: 8 Forward time: 4.98s Backward time: 3.32s Step time: 3.66s\n", - "897648 Examples seen. Accuracy:0.9340 Error: 0.14535 Loss:0.10326 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.81s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 18 Examples seen:898272 Validation Accuracy: 0.9721 Validation Error: 0.0786 Validation Loss: 0.0815 Total time: 99.55min\n", - "Epoch time: 5 minutes. 100 epochs: 8.3 hours.\n", - "Epochs: 18. Working time: 1.66 hours.\n", - "898912 Examples seen. Accuracy:0.9332 Error: 0.28565 Loss:0.36515 Threads: 8 Forward time: 4.89s Backward time: 3.31s Step time: 3.66s\n", - "899552 Examples seen. Accuracy:0.9332 Error: 0.19622 Loss:0.23889 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 3.64s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "900192 Examples seen. Accuracy:0.9327 Error: 0.22202 Loss:0.22946 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.63s\n", - "900832 Examples seen. Accuracy:0.9333 Error: 0.25322 Loss:0.27746 Threads: 8 Forward time: 4.98s Backward time: 3.32s Step time: 3.63s\n", - "901472 Examples seen. 
Accuracy:0.9333 Error: 0.16254 Loss:0.13168 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.63s\n", - "902112 Examples seen. Accuracy:0.9347 Error: 0.08337 Loss:0.05627 Threads: 8 Forward time: 4.91s Backward time: 3.30s Step time: 3.62s\n", - "902752 Examples seen. Accuracy:0.9340 Error: 0.16384 Loss:0.24361 Threads: 8 Forward time: 4.93s Backward time: 3.34s Step time: 3.67s\n", - "903392 Examples seen. Accuracy:0.9348 Error: 0.15722 Loss:0.13759 Threads: 8 Forward time: 4.93s Backward time: 3.31s Step time: 3.63s\n", - "904032 Examples seen. Accuracy:0.9351 Error: 0.14440 Loss:0.22122 Threads: 8 Forward time: 4.91s Backward time: 3.34s Step time: 3.66s\n", - "904672 Examples seen. Accuracy:0.9353 Error: 0.18423 Loss:0.16436 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.65s\n", - "905312 Examples seen. Accuracy:0.9346 Error: 0.19697 Loss:0.14631 Threads: 8 Forward time: 5.02s Backward time: 3.33s Step time: 3.66s\n", - "905952 Examples seen. Accuracy:0.9350 Error: 0.20111 Loss:0.18979 Threads: 8 Forward time: 5.04s Backward time: 3.33s Step time: 3.70s\n", - "906592 Examples seen. Accuracy:0.9346 Error: 0.15856 Loss:0.11087 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.66s\n", - "907232 Examples seen. Accuracy:0.9358 Error: 0.18154 Loss:0.15843 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.65s\n", - "907872 Examples seen. Accuracy:0.9370 Error: 0.20007 Loss:0.20796 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.91s\n", - "908512 Examples seen. Accuracy:0.9371 Error: 0.16443 Loss:0.15782 Threads: 8 Forward time: 5.01s Backward time: 3.30s Step time: 3.93s\n", - "909152 Examples seen. Accuracy:0.9358 Error: 0.16584 Loss:0.20556 Threads: 8 Forward time: 5.01s Backward time: 3.32s Step time: 4.22s\n", - "909792 Examples seen. Accuracy:0.9358 Error: 0.12115 Loss:0.08364 Threads: 8 Forward time: 5.00s Backward time: 3.33s Step time: 3.71s\n", - "910432 Examples seen. 
Accuracy:0.9355 Error: 0.21529 Loss:0.21447 Threads: 8 Forward time: 5.03s Backward time: 3.31s Step time: 3.71s\n", - "911072 Examples seen. Accuracy:0.9365 Error: 0.08919 Loss:0.05372 Threads: 8 Forward time: 4.97s Backward time: 3.32s Step time: 3.69s\n", - "911712 Examples seen. Accuracy:0.9371 Error: 0.13669 Loss:0.09794 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.69s\n", - "912352 Examples seen. Accuracy:0.9374 Error: 0.14753 Loss:0.13567 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.69s\n", - "912992 Examples seen. Accuracy:0.9376 Error: 0.13236 Loss:0.13950 Threads: 8 Forward time: 5.02s Backward time: 3.37s Step time: 3.72s\n", - "913632 Examples seen. Accuracy:0.9379 Error: 0.14187 Loss:0.11796 Threads: 8 Forward time: 5.81s Backward time: 3.78s Step time: 4.31s\n", - "914272 Examples seen. Accuracy:0.9366 Error: 0.18883 Loss:0.14902 Threads: 8 Forward time: 5.03s Backward time: 3.29s Step time: 3.73s\n", - "914912 Examples seen. Accuracy:0.9356 Error: 0.13842 Loss:0.16856 Threads: 8 Forward time: 4.95s Backward time: 3.30s Step time: 3.72s\n", - "915552 Examples seen. Accuracy:0.9335 Error: 0.31244 Loss:0.41594 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.71s\n", - "916192 Examples seen. Accuracy:0.9336 Error: 0.21795 Loss:0.19179 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 3.69s\n", - "916832 Examples seen. Accuracy:0.9319 Error: 0.15477 Loss:0.19886 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.70s\n", - "917472 Examples seen. Accuracy:0.9304 Error: 0.11203 Loss:0.11011 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.70s\n", - "918112 Examples seen. Accuracy:0.9288 Error: 0.19572 Loss:0.26014 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.69s\n", - "918752 Examples seen. Accuracy:0.9291 Error: 0.25683 Loss:0.24395 Threads: 8 Forward time: 4.91s Backward time: 3.30s Step time: 3.76s\n", - "919392 Examples seen. 
Accuracy:0.9303 Error: 0.14958 Loss:0.15238 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.70s\n", - "920032 Examples seen. Accuracy:0.9318 Error: 0.17831 Loss:0.20759 Threads: 8 Forward time: 4.97s Backward time: 3.27s Step time: 3.68s\n", - "920672 Examples seen. Accuracy:0.9315 Error: 0.09786 Loss:0.06715 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.70s\n", - "921312 Examples seen. Accuracy:0.9327 Error: 0.16318 Loss:0.14284 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.69s\n", - "921952 Examples seen. Accuracy:0.9337 Error: 0.12429 Loss:0.13042 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.69s\n", - "922592 Examples seen. Accuracy:0.9347 Error: 0.24117 Loss:0.26262 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.71s\n", - "923232 Examples seen. Accuracy:0.9341 Error: 0.30670 Loss:0.37487 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.70s\n", - "923872 Examples seen. Accuracy:0.9346 Error: 0.12323 Loss:0.19294 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.70s\n", - "924512 Examples seen. Accuracy:0.9347 Error: 0.10201 Loss:0.13114 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.71s\n", - "925152 Examples seen. Accuracy:0.9332 Error: 0.14510 Loss:0.14428 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.69s\n", - "925792 Examples seen. Accuracy:0.9335 Error: 0.15169 Loss:0.21652 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.69s\n", - "926432 Examples seen. Accuracy:0.9325 Error: 0.08620 Loss:0.06931 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.67s\n", - "927072 Examples seen. Accuracy:0.9321 Error: 0.29239 Loss:0.31947 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.68s\n", - "927712 Examples seen. Accuracy:0.9321 Error: 0.09640 Loss:0.08032 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.75s\n", - "928352 Examples seen. 
Accuracy:0.9333 Error: 0.11946 Loss:0.09632 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.69s\n", - "928992 Examples seen. Accuracy:0.9339 Error: 0.24253 Loss:0.25251 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.73s\n", - "929632 Examples seen. Accuracy:0.9333 Error: 0.23032 Loss:0.25145 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.66s\n", - "930272 Examples seen. Accuracy:0.9334 Error: 0.13630 Loss:0.14162 Threads: 8 Forward time: 5.00s Backward time: 3.32s Step time: 3.70s\n", - "930912 Examples seen. Accuracy:0.9325 Error: 0.23179 Loss:0.20368 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.74s\n", - "931552 Examples seen. Accuracy:0.9330 Error: 0.16954 Loss:0.15874 Threads: 8 Forward time: 4.96s Backward time: 3.32s Step time: 3.72s\n", - "932192 Examples seen. Accuracy:0.9327 Error: 0.11960 Loss:0.09325 Threads: 8 Forward time: 4.95s Backward time: 3.30s Step time: 3.72s\n", - "932832 Examples seen. Accuracy:0.9330 Error: 0.25955 Loss:0.29273 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.70s\n", - "933472 Examples seen. Accuracy:0.9331 Error: 0.13731 Loss:0.08973 Threads: 8 Forward time: 5.04s Backward time: 3.29s Step time: 3.78s\n", - "934112 Examples seen. Accuracy:0.9333 Error: 0.16796 Loss:0.15477 Threads: 8 Forward time: 4.95s Backward time: 3.30s Step time: 3.75s\n", - "934752 Examples seen. Accuracy:0.9327 Error: 0.10998 Loss:0.15942 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.73s\n", - "935392 Examples seen. Accuracy:0.9322 Error: 0.16077 Loss:0.13651 Threads: 8 Forward time: 5.07s Backward time: 3.29s Step time: 3.75s\n", - "936032 Examples seen. Accuracy:0.9339 Error: 0.10061 Loss:0.07049 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.77s\n", - "936672 Examples seen. Accuracy:0.9343 Error: 0.28101 Loss:0.31674 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.73s\n", - "937312 Examples seen. 
Accuracy:0.9341 Error: 0.07737 Loss:0.06687 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.72s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "937952 Examples seen. Accuracy:0.9340 Error: 0.23360 Loss:0.23271 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.70s\n", - "938592 Examples seen. Accuracy:0.9349 Error: 0.09053 Loss:0.06425 Threads: 8 Forward time: 4.92s Backward time: 3.28s Step time: 3.69s\n", - "939232 Examples seen. Accuracy:0.9342 Error: 0.24665 Loss:0.18591 Threads: 8 Forward time: 4.98s Backward time: 3.32s Step time: 3.71s\n", - "939872 Examples seen. Accuracy:0.9354 Error: 0.10677 Loss:0.10166 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.67s\n", - "940512 Examples seen. Accuracy:0.9350 Error: 0.17089 Loss:0.12212 Threads: 8 Forward time: 5.01s Backward time: 3.30s Step time: 3.69s\n", - "941152 Examples seen. Accuracy:0.9347 Error: 0.19117 Loss:0.16232 Threads: 8 Forward time: 5.02s Backward time: 3.29s Step time: 3.69s\n", - "941792 Examples seen. Accuracy:0.9345 Error: 0.11992 Loss:0.10492 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.69s\n", - "942432 Examples seen. Accuracy:0.9342 Error: 0.23129 Loss:0.26354 Threads: 8 Forward time: 5.03s Backward time: 3.30s Step time: 3.69s\n", - "943072 Examples seen. Accuracy:0.9346 Error: 0.14941 Loss:0.20970 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.72s\n", - "943712 Examples seen. Accuracy:0.9344 Error: 0.11692 Loss:0.09496 Threads: 8 Forward time: 5.06s Backward time: 3.31s Step time: 4.26s\n", - "944352 Examples seen. Accuracy:0.9355 Error: 0.15168 Loss:0.13723 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.66s\n", - "944992 Examples seen. Accuracy:0.9351 Error: 0.16192 Loss:0.19788 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.65s\n", - "945632 Examples seen. 
Accuracy:0.9350 Error: 0.17196 Loss:0.22656 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.66s\n", - "946272 Examples seen. Accuracy:0.9350 Error: 0.18142 Loss:0.19020 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.68s\n", - "946912 Examples seen. Accuracy:0.9342 Error: 0.14163 Loss:0.12894 Threads: 8 Forward time: 4.91s Backward time: 3.30s Step time: 3.68s\n", - "947552 Examples seen. Accuracy:0.9339 Error: 0.22605 Loss:0.20335 Threads: 8 Forward time: 4.90s Backward time: 3.29s Step time: 3.61s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 19 Examples seen:948176 Validation Accuracy: 0.9728 Validation Error: 0.0752 Validation Loss: 0.0796 Total time: 104.86min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.8 hours.\n", - "Epochs: 19. Working time: 1.75 hours.\n", - "948816 Examples seen. Accuracy:0.9337 Error: 0.08842 Loss:0.05630 Threads: 8 Forward time: 4.89s Backward time: 3.29s Step time: 3.64s\n", - "949456 Examples seen. Accuracy:0.9335 Error: 0.14330 Loss:0.09962 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.64s\n", - "950096 Examples seen. Accuracy:0.9330 Error: 0.09488 Loss:0.06687 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.64s\n", - "950736 Examples seen. Accuracy:0.9318 Error: 0.17886 Loss:0.12939 Threads: 8 Forward time: 4.92s Backward time: 3.27s Step time: 3.68s\n", - "951376 Examples seen. Accuracy:0.9325 Error: 0.11387 Loss:0.09973 Threads: 8 Forward time: 4.90s Backward time: 3.28s Step time: 3.62s\n", - "952016 Examples seen. Accuracy:0.9331 Error: 0.13560 Loss:0.10236 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.62s\n", - "952656 Examples seen. Accuracy:0.9338 Error: 0.14782 Loss:0.10988 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.63s\n", - "953296 Examples seen. 
Accuracy:0.9346 Error: 0.12662 Loss:0.09441 Threads: 8 Forward time: 4.91s Backward time: 3.28s Step time: 3.61s\n", - "953936 Examples seen. Accuracy:0.9348 Error: 0.20488 Loss:0.22021 Threads: 8 Forward time: 4.91s Backward time: 3.26s Step time: 3.61s\n", - "954576 Examples seen. Accuracy:0.9339 Error: 0.09196 Loss:0.07950 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.63s\n", - "955216 Examples seen. Accuracy:0.9325 Error: 0.10137 Loss:0.07042 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.67s\n", - "955856 Examples seen. Accuracy:0.9318 Error: 0.11629 Loss:0.14477 Threads: 8 Forward time: 4.91s Backward time: 3.26s Step time: 3.61s\n", - "956496 Examples seen. Accuracy:0.9322 Error: 0.09635 Loss:0.08045 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.61s\n", - "957136 Examples seen. Accuracy:0.9296 Error: 0.21215 Loss:0.33157 Threads: 8 Forward time: 4.90s Backward time: 3.27s Step time: 3.60s\n", - "957776 Examples seen. Accuracy:0.9294 Error: 0.26400 Loss:0.29434 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.61s\n", - "958416 Examples seen. Accuracy:0.9298 Error: 0.23262 Loss:0.32465 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.63s\n", - "959056 Examples seen. Accuracy:0.9296 Error: 0.25139 Loss:0.25538 Threads: 8 Forward time: 4.99s Backward time: 3.30s Step time: 3.65s\n", - "959696 Examples seen. Accuracy:0.9308 Error: 0.07061 Loss:0.04188 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.76s\n", - "960336 Examples seen. Accuracy:0.9303 Error: 0.14351 Loss:0.11191 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.69s\n", - "960976 Examples seen. Accuracy:0.9314 Error: 0.13034 Loss:0.17459 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.72s\n", - "961616 Examples seen. Accuracy:0.9317 Error: 0.18498 Loss:0.15089 Threads: 8 Forward time: 5.01s Backward time: 3.30s Step time: 3.68s\n", - "962256 Examples seen. 
Accuracy:0.9319 Error: 0.15155 Loss:0.11495 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.72s\n", - "962896 Examples seen. Accuracy:0.9314 Error: 0.17474 Loss:0.20256 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.66s\n", - "963536 Examples seen. Accuracy:0.9327 Error: 0.09224 Loss:0.06967 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.65s\n", - "964176 Examples seen. Accuracy:0.9336 Error: 0.15469 Loss:0.12259 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.64s\n", - "964816 Examples seen. Accuracy:0.9345 Error: 0.16874 Loss:0.16323 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.69s\n", - "965456 Examples seen. Accuracy:0.9330 Error: 0.21291 Loss:0.26617 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.70s\n", - "966096 Examples seen. Accuracy:0.9335 Error: 0.07029 Loss:0.04219 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.72s\n", - "966736 Examples seen. Accuracy:0.9343 Error: 0.12966 Loss:0.08937 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.71s\n", - "967376 Examples seen. Accuracy:0.9351 Error: 0.11018 Loss:0.11500 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.69s\n", - "968016 Examples seen. Accuracy:0.9369 Error: 0.14996 Loss:0.13956 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.72s\n", - "968656 Examples seen. Accuracy:0.9377 Error: 0.13940 Loss:0.11225 Threads: 8 Forward time: 7.83s Backward time: 4.96s Step time: 4.19s\n", - "969296 Examples seen. Accuracy:0.9374 Error: 0.12336 Loss:0.15393 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.73s\n", - "969936 Examples seen. Accuracy:0.9383 Error: 0.16669 Loss:0.13981 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.68s\n", - "970576 Examples seen. Accuracy:0.9370 Error: 0.15209 Loss:0.11699 Threads: 8 Forward time: 5.18s Backward time: 3.35s Step time: 3.73s\n", - "971216 Examples seen. 
Accuracy:0.9369 Error: 0.14281 Loss:0.14358 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.66s\n", - "971856 Examples seen. Accuracy:0.9376 Error: 0.10629 Loss:0.10674 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.74s\n", - "972496 Examples seen. Accuracy:0.9380 Error: 0.17030 Loss:0.21394 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.73s\n", - "973136 Examples seen. Accuracy:0.9364 Error: 0.20765 Loss:0.19736 Threads: 8 Forward time: 5.01s Backward time: 3.30s Step time: 3.71s\n", - "973776 Examples seen. Accuracy:0.9386 Error: 0.11261 Loss:0.07438 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.66s\n", - "974416 Examples seen. Accuracy:0.9400 Error: 0.06419 Loss:0.03617 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.66s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "975056 Examples seen. Accuracy:0.9402 Error: 0.16715 Loss:0.20872 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 3.66s\n", - "975696 Examples seen. Accuracy:0.9413 Error: 0.11517 Loss:0.07074 Threads: 8 Forward time: 4.91s Backward time: 3.31s Step time: 3.65s\n", - "976336 Examples seen. Accuracy:0.9396 Error: 0.16187 Loss:0.13369 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.66s\n", - "976976 Examples seen. Accuracy:0.9398 Error: 0.12819 Loss:0.15170 Threads: 8 Forward time: 4.89s Backward time: 3.24s Step time: 3.64s\n", - "977616 Examples seen. Accuracy:0.9382 Error: 0.22360 Loss:0.22564 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.68s\n", - "978256 Examples seen. Accuracy:0.9357 Error: 0.14552 Loss:0.14895 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.63s\n", - "978896 Examples seen. Accuracy:0.9350 Error: 0.17976 Loss:0.15143 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.66s\n", - "979536 Examples seen. 
Accuracy:0.9356 Error: 0.12707 Loss:0.11543 Threads: 8 Forward time: 4.91s Backward time: 3.27s Step time: 3.63s\n", - "980176 Examples seen. Accuracy:0.9363 Error: 0.18955 Loss:0.20754 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.62s\n", - "980816 Examples seen. Accuracy:0.9381 Error: 0.30464 Loss:0.38584 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.61s\n", - "981456 Examples seen. Accuracy:0.9382 Error: 0.10138 Loss:0.09692 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.60s\n", - "982096 Examples seen. Accuracy:0.9397 Error: 0.18930 Loss:0.14706 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.60s\n", - "982736 Examples seen. Accuracy:0.9392 Error: 0.15525 Loss:0.16459 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.60s\n", - "983376 Examples seen. Accuracy:0.9389 Error: 0.17220 Loss:0.19944 Threads: 8 Forward time: 4.90s Backward time: 3.27s Step time: 3.61s\n", - "984016 Examples seen. Accuracy:0.9375 Error: 0.13405 Loss:0.11980 Threads: 8 Forward time: 4.91s Backward time: 3.25s Step time: 3.63s\n", - "984656 Examples seen. Accuracy:0.9381 Error: 0.23697 Loss:0.39276 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.61s\n", - "985296 Examples seen. Accuracy:0.9386 Error: 0.17989 Loss:0.19990 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.62s\n", - "985936 Examples seen. Accuracy:0.9391 Error: 0.19084 Loss:0.17228 Threads: 8 Forward time: 4.91s Backward time: 3.29s Step time: 3.61s\n", - "986576 Examples seen. Accuracy:0.9397 Error: 0.12895 Loss:0.14391 Threads: 8 Forward time: 4.95s Backward time: 3.30s Step time: 3.65s\n", - "987216 Examples seen. Accuracy:0.9412 Error: 0.09183 Loss:0.07460 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.62s\n", - "987856 Examples seen. Accuracy:0.9402 Error: 0.24106 Loss:0.31260 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.62s\n", - "988496 Examples seen. 
Accuracy:0.9398 Error: 0.27409 Loss:0.36580 Threads: 8 Forward time: 4.92s Backward time: 3.27s Step time: 3.61s\n", - "989136 Examples seen. Accuracy:0.9399 Error: 0.19434 Loss:0.14114 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.62s\n", - "989776 Examples seen. Accuracy:0.9397 Error: 0.18174 Loss:0.21309 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.62s\n", - "990416 Examples seen. Accuracy:0.9383 Error: 0.17824 Loss:0.16778 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.64s\n", - "991056 Examples seen. Accuracy:0.9383 Error: 0.16129 Loss:0.20397 Threads: 8 Forward time: 5.01s Backward time: 3.27s Step time: 3.67s\n", - "991696 Examples seen. Accuracy:0.9375 Error: 0.22722 Loss:0.18484 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.65s\n", - "992336 Examples seen. Accuracy:0.9355 Error: 0.09220 Loss:0.06090 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.65s\n", - "992976 Examples seen. Accuracy:0.9365 Error: 0.13139 Loss:0.14182 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.64s\n", - "993616 Examples seen. Accuracy:0.9374 Error: 0.10060 Loss:0.06814 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.63s\n", - "994256 Examples seen. Accuracy:0.9379 Error: 0.20826 Loss:0.28773 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.69s\n", - "994896 Examples seen. Accuracy:0.9384 Error: 0.15047 Loss:0.20643 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.62s\n", - "995536 Examples seen. Accuracy:0.9381 Error: 0.11685 Loss:0.11920 Threads: 8 Forward time: 4.99s Backward time: 3.30s Step time: 3.65s\n", - "996176 Examples seen. Accuracy:0.9391 Error: 0.10343 Loss:0.09796 Threads: 8 Forward time: 5.05s Backward time: 3.27s Step time: 3.65s\n", - "996816 Examples seen. Accuracy:0.9393 Error: 0.12977 Loss:0.14622 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.68s\n", - "997456 Examples seen. 
Accuracy:0.9386 Error: 0.10276 Loss:0.06828 Threads: 8 Forward time: 4.95s Backward time: 3.35s Step time: 3.70s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 20 Examples seen:998080 Validation Accuracy: 0.9754 Validation Error: 0.0716 Validation Loss: 0.0771 Total time: 110.10min\n", - "Starting Testing.\n", - "Epochs: 20 Examples seen:998080 Test Accuracy: 0.9819 Test Error: 0.0635 Test Loss: 0.0574 Total time: 110.57min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 20. Working time: 1.84 hours.\n", - "Learning rate set to:0.00082\n", - "998720 Examples seen. Accuracy:0.9380 Error: 0.15606 Loss:0.11226 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.69s\n", - "999360 Examples seen. Accuracy:0.9380 Error: 0.17582 Loss:0.20992 Threads: 8 Forward time: 5.02s Backward time: 3.33s Step time: 3.66s\n", - "1000000 Examples seen. Accuracy:0.9390 Error: 0.16061 Loss:0.11689 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.67s\n", - "1000640 Examples seen. Accuracy:0.9385 Error: 0.18791 Loss:0.21620 Threads: 8 Forward time: 4.92s Backward time: 3.30s Step time: 3.64s\n", - "1001280 Examples seen. Accuracy:0.9387 Error: 0.15905 Loss:0.11588 Threads: 8 Forward time: 5.35s Backward time: 3.57s Step time: 5.28s\n", - "1001920 Examples seen. Accuracy:0.9381 Error: 0.12402 Loss:0.09285 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 4.23s\n", - "1002560 Examples seen. Accuracy:0.9380 Error: 0.20191 Loss:0.21335 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.72s\n", - "1003200 Examples seen. Accuracy:0.9387 Error: 0.14123 Loss:0.25109 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.74s\n", - "1003840 Examples seen. Accuracy:0.9368 Error: 0.31502 Loss:0.49339 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.66s\n", - "1004480 Examples seen. 
Accuracy:0.9373 Error: 0.05675 Loss:0.04700 Threads: 8 Forward time: 5.02s Backward time: 3.31s Step time: 3.71s\n", - "1005120 Examples seen. Accuracy:0.9370 Error: 0.15868 Loss:0.16694 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.72s\n", - "1005760 Examples seen. Accuracy:0.9374 Error: 0.24801 Loss:0.42999 Threads: 8 Forward time: 5.03s Backward time: 3.29s Step time: 3.70s\n", - "1006400 Examples seen. Accuracy:0.9394 Error: 0.14046 Loss:0.10851 Threads: 8 Forward time: 4.99s Backward time: 3.34s Step time: 3.70s\n", - "1007040 Examples seen. Accuracy:0.9394 Error: 0.10072 Loss:0.10989 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 3.68s\n", - "1007680 Examples seen. Accuracy:0.9389 Error: 0.19668 Loss:0.20501 Threads: 8 Forward time: 5.02s Backward time: 3.33s Step time: 3.66s\n", - "1008320 Examples seen. Accuracy:0.9388 Error: 0.12827 Loss:0.13005 Threads: 8 Forward time: 4.95s Backward time: 3.30s Step time: 3.69s\n", - "1008960 Examples seen. Accuracy:0.9397 Error: 0.24293 Loss:0.41236 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.66s\n", - "1009600 Examples seen. Accuracy:0.9396 Error: 0.10360 Loss:0.07998 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.65s\n", - "1010240 Examples seen. Accuracy:0.9385 Error: 0.12878 Loss:0.18624 Threads: 8 Forward time: 5.04s Backward time: 3.30s Step time: 3.76s\n", - "1010880 Examples seen. Accuracy:0.9391 Error: 0.15860 Loss:0.14407 Threads: 8 Forward time: 5.14s Backward time: 3.29s Step time: 3.71s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1011520 Examples seen. Accuracy:0.9406 Error: 0.10252 Loss:0.07616 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.68s\n", - "1012160 Examples seen. Accuracy:0.9400 Error: 0.17599 Loss:0.17918 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.66s\n", - "1012800 Examples seen. 
Accuracy:0.9402 Error: 0.07600 Loss:0.05283 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.65s\n", - "1013440 Examples seen. Accuracy:0.9412 Error: 0.13442 Loss:0.11871 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.65s\n", - "1014080 Examples seen. Accuracy:0.9410 Error: 0.15921 Loss:0.14679 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.65s\n", - "1014720 Examples seen. Accuracy:0.9400 Error: 0.12354 Loss:0.15243 Threads: 8 Forward time: 5.04s Backward time: 3.29s Step time: 3.68s\n", - "1015360 Examples seen. Accuracy:0.9400 Error: 0.12499 Loss:0.09048 Threads: 8 Forward time: 5.01s Backward time: 3.30s Step time: 3.71s\n", - "1016000 Examples seen. Accuracy:0.9417 Error: 0.13423 Loss:0.14851 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.75s\n", - "1016640 Examples seen. Accuracy:0.9416 Error: 0.24924 Loss:0.25736 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.69s\n", - "1017280 Examples seen. Accuracy:0.9422 Error: 0.08832 Loss:0.05413 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.69s\n", - "1017920 Examples seen. Accuracy:0.9409 Error: 0.18979 Loss:0.19725 Threads: 8 Forward time: 4.97s Backward time: 3.27s Step time: 3.71s\n", - "1018560 Examples seen. Accuracy:0.9401 Error: 0.20400 Loss:0.22846 Threads: 8 Forward time: 5.05s Backward time: 3.34s Step time: 3.75s\n", - "1019200 Examples seen. Accuracy:0.9398 Error: 0.19464 Loss:0.16639 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 4.29s\n", - "1019840 Examples seen. Accuracy:0.9395 Error: 0.20691 Loss:0.21282 Threads: 8 Forward time: 4.92s Backward time: 3.28s Step time: 3.72s\n", - "1020480 Examples seen. Accuracy:0.9421 Error: 0.11174 Loss:0.10274 Threads: 8 Forward time: 4.99s Backward time: 3.32s Step time: 3.67s\n", - "1021120 Examples seen. 
Accuracy:0.9417 Error: 0.13866 Loss:0.15672 Threads: 8 Forward time: 4.91s Backward time: 3.29s Step time: 3.68s\n", - "1021760 Examples seen. Accuracy:0.9409 Error: 0.11041 Loss:0.15807 Threads: 8 Forward time: 4.92s Backward time: 3.28s Step time: 3.66s\n", - "1022400 Examples seen. Accuracy:0.9403 Error: 0.14476 Loss:0.13817 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.67s\n", - "1023040 Examples seen. Accuracy:0.9405 Error: 0.09324 Loss:0.07487 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.67s\n", - "1023680 Examples seen. Accuracy:0.9419 Error: 0.09300 Loss:0.05732 Threads: 8 Forward time: 4.98s Backward time: 3.33s Step time: 3.68s\n", - "1024320 Examples seen. Accuracy:0.9409 Error: 0.11892 Loss:0.08042 Threads: 8 Forward time: 4.94s Backward time: 3.30s Step time: 3.67s\n", - "1024960 Examples seen. Accuracy:0.9409 Error: 0.17977 Loss:0.14857 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.70s\n", - "1025600 Examples seen. Accuracy:0.9399 Error: 0.10192 Loss:0.07094 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.65s\n", - "1026240 Examples seen. Accuracy:0.9409 Error: 0.20096 Loss:0.26709 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.65s\n", - "1026880 Examples seen. Accuracy:0.9423 Error: 0.19891 Loss:0.22236 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.66s\n", - "1027520 Examples seen. Accuracy:0.9417 Error: 0.14905 Loss:0.14301 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.67s\n", - "1028160 Examples seen. Accuracy:0.9421 Error: 0.11452 Loss:0.07844 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.68s\n", - "1028800 Examples seen. Accuracy:0.9419 Error: 0.21365 Loss:0.28618 Threads: 8 Forward time: 4.92s Backward time: 3.30s Step time: 3.68s\n", - "1029440 Examples seen. 
Accuracy:0.9402 Error: 0.21966 Loss:0.26615 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.69s\n", - "1030080 Examples seen. Accuracy:0.9408 Error: 0.16668 Loss:0.15809 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.66s\n", - "1030720 Examples seen. Accuracy:0.9412 Error: 0.06505 Loss:0.03781 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.67s\n", - "1031360 Examples seen. Accuracy:0.9430 Error: 0.12582 Loss:0.14228 Threads: 8 Forward time: 4.95s Backward time: 3.28s Step time: 3.68s\n", - "1032000 Examples seen. Accuracy:0.9438 Error: 0.08969 Loss:0.06734 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.68s\n", - "1032640 Examples seen. Accuracy:0.9432 Error: 0.16696 Loss:0.17333 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.68s\n", - "1033280 Examples seen. Accuracy:0.9434 Error: 0.18620 Loss:0.21042 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.67s\n", - "1033920 Examples seen. Accuracy:0.9434 Error: 0.23867 Loss:0.19954 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.66s\n", - "1034560 Examples seen. Accuracy:0.9428 Error: 0.15853 Loss:0.15873 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.68s\n", - "1035200 Examples seen. Accuracy:0.9413 Error: 0.16289 Loss:0.14258 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.67s\n", - "1035840 Examples seen. Accuracy:0.9406 Error: 0.21517 Loss:0.25260 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.69s\n", - "1036480 Examples seen. Accuracy:0.9417 Error: 0.16048 Loss:0.15307 Threads: 8 Forward time: 4.96s Backward time: 3.30s Step time: 3.66s\n", - "1037120 Examples seen. Accuracy:0.9418 Error: 0.28925 Loss:0.31492 Threads: 8 Forward time: 4.93s Backward time: 3.27s Step time: 3.66s\n", - "1037760 Examples seen. 
Accuracy:0.9411 Error: 0.19507 Loss:0.16127 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.70s\n", - "1038400 Examples seen. Accuracy:0.9401 Error: 0.10647 Loss:0.16149 Threads: 8 Forward time: 4.92s Backward time: 3.28s Step time: 3.65s\n", - "1039040 Examples seen. Accuracy:0.9404 Error: 0.22006 Loss:0.20029 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.72s\n", - "1039680 Examples seen. Accuracy:0.9391 Error: 0.25526 Loss:0.39782 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.70s\n", - "1040320 Examples seen. Accuracy:0.9391 Error: 0.08865 Loss:0.06161 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.75s\n", - "1040960 Examples seen. Accuracy:0.9401 Error: 0.15010 Loss:0.13009 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.75s\n", - "1041600 Examples seen. Accuracy:0.9397 Error: 0.16926 Loss:0.15399 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.69s\n", - "1042240 Examples seen. Accuracy:0.9407 Error: 0.11042 Loss:0.13679 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.72s\n", - "1042880 Examples seen. Accuracy:0.9402 Error: 0.08517 Loss:0.05677 Threads: 8 Forward time: 5.03s Backward time: 3.30s Step time: 3.70s\n", - "1043520 Examples seen. Accuracy:0.9399 Error: 0.15405 Loss:0.21766 Threads: 8 Forward time: 5.01s Backward time: 3.32s Step time: 3.76s\n", - "1044160 Examples seen. Accuracy:0.9399 Error: 0.12258 Loss:0.11280 Threads: 8 Forward time: 4.95s Backward time: 3.33s Step time: 3.76s\n", - "1044800 Examples seen. Accuracy:0.9390 Error: 0.14773 Loss:0.17697 Threads: 8 Forward time: 5.09s Backward time: 3.32s Step time: 3.80s\n", - "1045440 Examples seen. Accuracy:0.9389 Error: 0.14281 Loss:0.16803 Threads: 8 Forward time: 5.05s Backward time: 3.31s Step time: 3.73s\n", - "1046080 Examples seen. 
Accuracy:0.9407 Error: 0.12623 Loss:0.12138 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.72s\n", - "1046720 Examples seen. Accuracy:0.9402 Error: 0.10736 Loss:0.13168 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.81s\n", - "1047360 Examples seen. Accuracy:0.9409 Error: 0.07710 Loss:0.04530 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.80s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 21 Examples seen:1047984 Validation Accuracy: 0.9757 Validation Error: 0.0693 Validation Loss: 0.0751 Total time: 115.90min\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.2 hours.\n", - "Epochs: 21. Working time: 1.93 hours.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1048624 Examples seen. Accuracy:0.9416 Error: 0.22140 Loss:0.27706 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.73s\n", - "1049264 Examples seen. Accuracy:0.9417 Error: 0.07956 Loss:0.04851 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.70s\n", - "1049904 Examples seen. Accuracy:0.9418 Error: 0.13258 Loss:0.09884 Threads: 8 Forward time: 4.94s Backward time: 3.31s Step time: 3.72s\n", - "1050544 Examples seen. Accuracy:0.9417 Error: 0.08471 Loss:0.05805 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.75s\n", - "1051184 Examples seen. Accuracy:0.9405 Error: 0.16332 Loss:0.15157 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 3.72s\n", - "1051824 Examples seen. Accuracy:0.9418 Error: 0.09365 Loss:0.08231 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.72s\n", - "1052464 Examples seen. Accuracy:0.9430 Error: 0.12558 Loss:0.08224 Threads: 8 Forward time: 4.95s Backward time: 3.32s Step time: 3.72s\n", - "1053104 Examples seen. Accuracy:0.9451 Error: 0.05609 Loss:0.07770 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.67s\n", - "1053744 Examples seen. 
Accuracy:0.9462 Error: 0.06936 Loss:0.04095 Threads: 8 Forward time: 4.91s Backward time: 3.27s Step time: 3.67s\n", - "1054384 Examples seen. Accuracy:0.9464 Error: 0.17336 Loss:0.16978 Threads: 8 Forward time: 4.94s Backward time: 3.30s Step time: 3.67s\n", - "1055024 Examples seen. Accuracy:0.9467 Error: 0.07723 Loss:0.05837 Threads: 8 Forward time: 4.94s Backward time: 3.30s Step time: 3.72s\n", - "1055664 Examples seen. Accuracy:0.9461 Error: 0.09990 Loss:0.06956 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.66s\n", - "1056304 Examples seen. Accuracy:0.9462 Error: 0.17575 Loss:0.17882 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.66s\n", - "1056944 Examples seen. Accuracy:0.9452 Error: 0.10842 Loss:0.08968 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.68s\n", - "1057584 Examples seen. Accuracy:0.9447 Error: 0.23762 Loss:0.23375 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.70s\n", - "1058224 Examples seen. Accuracy:0.9453 Error: 0.09969 Loss:0.11526 Threads: 8 Forward time: 4.92s Backward time: 3.33s Step time: 3.69s\n", - "1058864 Examples seen. Accuracy:0.9457 Error: 0.14085 Loss:0.09528 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.72s\n", - "1059504 Examples seen. Accuracy:0.9451 Error: 0.09112 Loss:0.06185 Threads: 8 Forward time: 4.94s Backward time: 3.32s Step time: 3.67s\n", - "1060144 Examples seen. Accuracy:0.9457 Error: 0.17827 Loss:0.14106 Threads: 8 Forward time: 4.94s Backward time: 3.31s Step time: 3.68s\n", - "1060784 Examples seen. Accuracy:0.9452 Error: 0.14503 Loss:0.15594 Threads: 8 Forward time: 4.93s Backward time: 3.30s Step time: 3.67s\n", - "1061424 Examples seen. Accuracy:0.9423 Error: 0.16498 Loss:0.25695 Threads: 8 Forward time: 5.01s Backward time: 3.29s Step time: 3.70s\n", - "1062064 Examples seen. 
Accuracy:0.9404 Error: 0.16148 Loss:0.15608 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.67s\n", - "1062704 Examples seen. Accuracy:0.9398 Error: 0.20150 Loss:0.26722 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.70s\n", - "1063344 Examples seen. Accuracy:0.9389 Error: 0.19005 Loss:0.21478 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.65s\n", - "1063984 Examples seen. Accuracy:0.9382 Error: 0.19322 Loss:0.20439 Threads: 8 Forward time: 4.92s Backward time: 3.28s Step time: 3.71s\n", - "1064624 Examples seen. Accuracy:0.9388 Error: 0.26689 Loss:0.28840 Threads: 8 Forward time: 5.06s Backward time: 3.30s Step time: 3.71s\n", - "1065264 Examples seen. Accuracy:0.9407 Error: 0.12291 Loss:0.10567 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 3.73s\n", - "1065904 Examples seen. Accuracy:0.9408 Error: 0.19481 Loss:0.15827 Threads: 8 Forward time: 4.97s Backward time: 3.27s Step time: 3.69s\n", - "1066544 Examples seen. Accuracy:0.9393 Error: 0.05947 Loss:0.03649 Threads: 8 Forward time: 5.03s Backward time: 3.32s Step time: 3.74s\n", - "1067184 Examples seen. Accuracy:0.9397 Error: 0.17932 Loss:0.18402 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.70s\n", - "1067824 Examples seen. Accuracy:0.9417 Error: 0.16040 Loss:0.11998 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.72s\n", - "1068464 Examples seen. Accuracy:0.9419 Error: 0.15005 Loss:0.10680 Threads: 8 Forward time: 5.02s Backward time: 3.32s Step time: 3.66s\n", - "1069104 Examples seen. Accuracy:0.9416 Error: 0.14459 Loss:0.12605 Threads: 8 Forward time: 5.04s Backward time: 3.33s Step time: 3.68s\n", - "1069744 Examples seen. Accuracy:0.9419 Error: 0.16791 Loss:0.17241 Threads: 8 Forward time: 5.08s Backward time: 3.31s Step time: 3.76s\n", - "1070384 Examples seen. 
Accuracy:0.9413 Error: 0.17009 Loss:0.16272 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.68s\n", - "1071024 Examples seen. Accuracy:0.9409 Error: 0.11426 Loss:0.08304 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.68s\n", - "1071664 Examples seen. Accuracy:0.9405 Error: 0.20175 Loss:0.24715 Threads: 8 Forward time: 4.98s Backward time: 3.31s Step time: 3.72s\n", - "1072304 Examples seen. Accuracy:0.9399 Error: 0.21003 Loss:0.19355 Threads: 8 Forward time: 5.02s Backward time: 3.37s Step time: 3.73s\n", - "1072944 Examples seen. Accuracy:0.9400 Error: 0.19522 Loss:0.14412 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 4.29s\n", - "1073584 Examples seen. Accuracy:0.9417 Error: 0.15198 Loss:0.14219 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.71s\n", - "1074224 Examples seen. Accuracy:0.9408 Error: 0.10407 Loss:0.13263 Threads: 8 Forward time: 4.97s Backward time: 3.32s Step time: 3.70s\n", - "1074864 Examples seen. Accuracy:0.9415 Error: 0.17778 Loss:0.16864 Threads: 8 Forward time: 4.92s Backward time: 3.29s Step time: 3.70s\n", - "1075504 Examples seen. Accuracy:0.9425 Error: 0.13496 Loss:0.14462 Threads: 8 Forward time: 5.11s Backward time: 3.40s Step time: 3.79s\n", - "1076144 Examples seen. Accuracy:0.9425 Error: 0.09835 Loss:0.11468 Threads: 8 Forward time: 5.04s Backward time: 3.33s Step time: 3.72s\n", - "1076784 Examples seen. Accuracy:0.9425 Error: 0.18631 Loss:0.16566 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.70s\n", - "1077424 Examples seen. Accuracy:0.9419 Error: 0.21663 Loss:0.27832 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.70s\n", - "1078064 Examples seen. Accuracy:0.9421 Error: 0.15323 Loss:0.11182 Threads: 8 Forward time: 5.09s Backward time: 3.33s Step time: 3.73s\n", - "1078704 Examples seen. 
Accuracy:0.9418 Error: 0.16705 Loss:0.18876 Threads: 8 Forward time: 5.30s Backward time: 3.41s Step time: 3.79s\n", - "1079344 Examples seen. Accuracy:0.9407 Error: 0.17315 Loss:0.16612 Threads: 8 Forward time: 4.95s Backward time: 3.30s Step time: 3.72s\n", - "1079984 Examples seen. Accuracy:0.9407 Error: 0.19254 Loss:0.17098 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.75s\n", - "1080624 Examples seen. Accuracy:0.9406 Error: 0.17402 Loss:0.15303 Threads: 8 Forward time: 4.91s Backward time: 3.26s Step time: 3.69s\n", - "1081264 Examples seen. Accuracy:0.9397 Error: 0.12523 Loss:0.11633 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.68s\n", - "1081904 Examples seen. Accuracy:0.9395 Error: 0.13804 Loss:0.10574 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.68s\n", - "1082544 Examples seen. Accuracy:0.9402 Error: 0.08202 Loss:0.05414 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.71s\n", - "1083184 Examples seen. Accuracy:0.9404 Error: 0.10537 Loss:0.07908 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.70s\n", - "1083824 Examples seen. Accuracy:0.9416 Error: 0.14569 Loss:0.10550 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.67s\n", - "1084464 Examples seen. Accuracy:0.9412 Error: 0.17179 Loss:0.15162 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.66s\n", - "1085104 Examples seen. Accuracy:0.9409 Error: 0.12321 Loss:0.11928 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.67s\n", - "1085744 Examples seen. Accuracy:0.9412 Error: 0.14831 Loss:0.15116 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.69s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1086384 Examples seen. Accuracy:0.9416 Error: 0.12226 Loss:0.09902 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.75s\n", - "1087024 Examples seen. 
Accuracy:0.9425 Error: 0.08110 Loss:0.05480 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.68s\n", - "1087664 Examples seen. Accuracy:0.9420 Error: 0.14540 Loss:0.12781 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.68s\n", - "1088304 Examples seen. Accuracy:0.9429 Error: 0.09252 Loss:0.08647 Threads: 8 Forward time: 5.01s Backward time: 3.30s Step time: 3.71s\n", - "1088944 Examples seen. Accuracy:0.9429 Error: 0.12294 Loss:0.13806 Threads: 8 Forward time: 4.96s Backward time: 3.31s Step time: 3.71s\n", - "1089584 Examples seen. Accuracy:0.9441 Error: 0.09385 Loss:0.06357 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.72s\n", - "1090224 Examples seen. Accuracy:0.9422 Error: 0.18190 Loss:0.17900 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.72s\n", - "1090864 Examples seen. Accuracy:0.9427 Error: 0.17771 Loss:0.14552 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.72s\n", - "1091504 Examples seen. Accuracy:0.9449 Error: 0.06497 Loss:0.07365 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.71s\n", - "1092144 Examples seen. Accuracy:0.9442 Error: 0.19330 Loss:0.20501 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.66s\n", - "1092784 Examples seen. Accuracy:0.9436 Error: 0.22128 Loss:0.22584 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.66s\n", - "1093424 Examples seen. Accuracy:0.9420 Error: 0.11000 Loss:0.09795 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.66s\n", - "1094064 Examples seen. Accuracy:0.9423 Error: 0.11610 Loss:0.10722 Threads: 8 Forward time: 5.09s Backward time: 3.32s Step time: 3.71s\n", - "1094704 Examples seen. Accuracy:0.9427 Error: 0.14846 Loss:0.13134 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.69s\n", - "1095344 Examples seen. 
Accuracy:0.9412 Error: 0.15211 Loss:0.13793 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.67s\n", - "1095984 Examples seen. Accuracy:0.9411 Error: 0.14959 Loss:0.13270 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.67s\n", - "1096624 Examples seen. Accuracy:0.9424 Error: 0.13953 Loss:0.11018 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.68s\n", - "1097264 Examples seen. Accuracy:0.9425 Error: 0.12376 Loss:0.12334 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.74s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 22 Examples seen:1097888 Validation Accuracy: 0.9783 Validation Error: 0.0654 Validation Loss: 0.0716 Total time: 121.21min\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.1 hours.\n", - "Epochs: 22. Working time: 2.02 hours.\n", - "1098528 Examples seen. Accuracy:0.9423 Error: 0.09284 Loss:0.06167 Threads: 8 Forward time: 5.02s Backward time: 3.32s Step time: 3.73s\n", - "1099168 Examples seen. Accuracy:0.9438 Error: 0.08001 Loss:0.14173 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.68s\n", - "1099808 Examples seen. Accuracy:0.9434 Error: 0.22561 Loss:0.27127 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.63s\n", - "1100448 Examples seen. Accuracy:0.9434 Error: 0.23495 Loss:0.20568 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.65s\n", - "1101088 Examples seen. Accuracy:0.9433 Error: 0.11784 Loss:0.11380 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.64s\n", - "1101728 Examples seen. Accuracy:0.9425 Error: 0.18355 Loss:0.19803 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.66s\n", - "1102368 Examples seen. Accuracy:0.9425 Error: 0.23640 Loss:0.22638 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.63s\n", - "1103008 Examples seen. 
Accuracy:0.9429 Error: 0.11586 Loss:0.10133 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.67s\n", - "1103648 Examples seen. Accuracy:0.9439 Error: 0.10770 Loss:0.17222 Threads: 8 Forward time: 4.93s Backward time: 3.32s Step time: 3.70s\n", - "1104288 Examples seen. Accuracy:0.9431 Error: 0.17562 Loss:0.22074 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.71s\n", - "1104928 Examples seen. Accuracy:0.9429 Error: 0.17727 Loss:0.21970 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.64s\n", - "1105568 Examples seen. Accuracy:0.9419 Error: 0.14166 Loss:0.14599 Threads: 8 Forward time: 5.21s Backward time: 3.32s Step time: 3.80s\n", - "1106208 Examples seen. Accuracy:0.9432 Error: 0.11440 Loss:0.08004 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.70s\n", - "1106848 Examples seen. Accuracy:0.9439 Error: 0.12359 Loss:0.12816 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.69s\n", - "1107488 Examples seen. Accuracy:0.9441 Error: 0.20007 Loss:0.19580 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.69s\n", - "1108128 Examples seen. Accuracy:0.9457 Error: 0.18324 Loss:0.20063 Threads: 8 Forward time: 4.93s Backward time: 3.29s Step time: 3.68s\n", - "1108768 Examples seen. Accuracy:0.9462 Error: 0.20224 Loss:0.18220 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.70s\n", - "1109408 Examples seen. Accuracy:0.9464 Error: 0.09220 Loss:0.08859 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.68s\n", - "1110048 Examples seen. Accuracy:0.9459 Error: 0.13530 Loss:0.10695 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.70s\n", - "1110688 Examples seen. Accuracy:0.9443 Error: 0.15863 Loss:0.13609 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.69s\n", - "1111328 Examples seen. 
Accuracy:0.9436 Error: 0.18403 Loss:0.17732 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.68s\n", - "1111968 Examples seen. Accuracy:0.9424 Error: 0.23619 Loss:0.26713 Threads: 8 Forward time: 5.04s Backward time: 3.31s Step time: 3.71s\n", - "1112608 Examples seen. Accuracy:0.9416 Error: 0.09557 Loss:0.07480 Threads: 8 Forward time: 4.95s Backward time: 3.29s Step time: 3.68s\n", - "1113248 Examples seen. Accuracy:0.9417 Error: 0.20328 Loss:0.22807 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.71s\n", - "1113888 Examples seen. Accuracy:0.9417 Error: 0.13156 Loss:0.16044 Threads: 8 Forward time: 5.05s Backward time: 3.31s Step time: 3.71s\n", - "1114528 Examples seen. Accuracy:0.9428 Error: 0.17281 Loss:0.16819 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.67s\n", - "1115168 Examples seen. Accuracy:0.9429 Error: 0.08148 Loss:0.07883 Threads: 8 Forward time: 4.97s Backward time: 3.30s Step time: 3.71s\n", - "1115808 Examples seen. Accuracy:0.9433 Error: 0.17055 Loss:0.13726 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.68s\n", - "1116448 Examples seen. Accuracy:0.9436 Error: 0.14327 Loss:0.22397 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.68s\n", - "1117088 Examples seen. Accuracy:0.9446 Error: 0.17658 Loss:0.13030 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.69s\n", - "1117728 Examples seen. Accuracy:0.9446 Error: 0.12643 Loss:0.13459 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.68s\n", - "1118368 Examples seen. Accuracy:0.9456 Error: 0.08116 Loss:0.08514 Threads: 8 Forward time: 5.06s Backward time: 3.31s Step time: 3.75s\n", - "1119008 Examples seen. Accuracy:0.9452 Error: 0.12995 Loss:0.09499 Threads: 8 Forward time: 4.97s Backward time: 3.31s Step time: 3.74s\n", - "1119648 Examples seen. 
Accuracy:0.9436 Error: 0.27118 Loss:0.34248 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.71s\n", - "1120288 Examples seen. Accuracy:0.9426 Error: 0.17803 Loss:0.23948 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.70s\n", - "1120928 Examples seen. Accuracy:0.9413 Error: 0.10791 Loss:0.15848 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.69s\n", - "1121568 Examples seen. Accuracy:0.9408 Error: 0.09225 Loss:0.06921 Threads: 8 Forward time: 4.95s Backward time: 3.28s Step time: 3.69s\n", - "1122208 Examples seen. Accuracy:0.9415 Error: 0.18588 Loss:0.15055 Threads: 8 Forward time: 4.91s Backward time: 3.31s Step time: 3.69s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1122848 Examples seen. Accuracy:0.9417 Error: 0.12132 Loss:0.12098 Threads: 8 Forward time: 5.03s Backward time: 3.33s Step time: 3.73s\n", - "1123488 Examples seen. Accuracy:0.9402 Error: 0.12329 Loss:0.12068 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.71s\n", - "1124128 Examples seen. Accuracy:0.9416 Error: 0.12067 Loss:0.10317 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.70s\n", - "1124768 Examples seen. Accuracy:0.9414 Error: 0.15002 Loss:0.11572 Threads: 8 Forward time: 5.09s Backward time: 3.30s Step time: 3.72s\n", - "1125408 Examples seen. Accuracy:0.9415 Error: 0.14510 Loss:0.16107 Threads: 8 Forward time: 5.05s Backward time: 3.29s Step time: 3.72s\n", - "1126048 Examples seen. Accuracy:0.9412 Error: 0.18075 Loss:0.17556 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.73s\n", - "1126688 Examples seen. Accuracy:0.9413 Error: 0.14880 Loss:0.10763 Threads: 8 Forward time: 4.99s Backward time: 3.30s Step time: 4.33s\n", - "1127328 Examples seen. Accuracy:0.9419 Error: 0.12706 Loss:0.13520 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.71s\n", - "1127968 Examples seen. 
Accuracy:0.9406 Error: 0.24491 Loss:0.26587 Threads: 8 Forward time: 4.93s Backward time: 3.27s Step time: 3.64s\n", - "1128608 Examples seen. Accuracy:0.9409 Error: 0.17438 Loss:0.18315 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.61s\n", - "1129248 Examples seen. Accuracy:0.9405 Error: 0.21757 Loss:0.21919 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.64s\n", - "1129888 Examples seen. Accuracy:0.9407 Error: 0.15371 Loss:0.12137 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.66s\n", - "1130528 Examples seen. Accuracy:0.9410 Error: 0.10420 Loss:0.06490 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.62s\n", - "1131168 Examples seen. Accuracy:0.9424 Error: 0.15544 Loss:0.10561 Threads: 8 Forward time: 5.00s Backward time: 3.30s Step time: 3.64s\n", - "1131808 Examples seen. Accuracy:0.9432 Error: 0.13716 Loss:0.09079 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.62s\n", - "1132448 Examples seen. Accuracy:0.9430 Error: 0.22261 Loss:0.18629 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.65s\n", - "1133088 Examples seen. Accuracy:0.9431 Error: 0.21223 Loss:0.21449 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.64s\n", - "1133728 Examples seen. Accuracy:0.9435 Error: 0.11453 Loss:0.09399 Threads: 8 Forward time: 5.05s Backward time: 3.33s Step time: 3.70s\n", - "1134368 Examples seen. Accuracy:0.9422 Error: 0.23880 Loss:0.28010 Threads: 8 Forward time: 4.91s Backward time: 3.31s Step time: 3.68s\n", - "1135008 Examples seen. Accuracy:0.9423 Error: 0.10716 Loss:0.08877 Threads: 8 Forward time: 4.93s Backward time: 3.27s Step time: 3.70s\n", - "1135648 Examples seen. Accuracy:0.9419 Error: 0.19055 Loss:0.17048 Threads: 8 Forward time: 4.92s Backward time: 3.27s Step time: 3.63s\n", - "1136288 Examples seen. 
Accuracy:0.9423 Error: 0.25915 Loss:0.30452 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.63s\n", - "1136928 Examples seen. Accuracy:0.9433 Error: 0.11288 Loss:0.09032 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.63s\n", - "1137568 Examples seen. Accuracy:0.9430 Error: 0.21124 Loss:0.18885 Threads: 8 Forward time: 4.93s Backward time: 3.27s Step time: 3.64s\n", - "1138208 Examples seen. Accuracy:0.9425 Error: 0.15065 Loss:0.16672 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.64s\n", - "1138848 Examples seen. Accuracy:0.9428 Error: 0.13827 Loss:0.13103 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.73s\n", - "1139488 Examples seen. Accuracy:0.9421 Error: 0.16019 Loss:0.17581 Threads: 8 Forward time: 4.91s Backward time: 3.26s Step time: 3.64s\n", - "1140128 Examples seen. Accuracy:0.9431 Error: 0.10862 Loss:0.09329 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.68s\n", - "1140768 Examples seen. Accuracy:0.9429 Error: 0.09697 Loss:0.05925 Threads: 8 Forward time: 5.02s Backward time: 3.29s Step time: 3.75s\n", - "1141408 Examples seen. Accuracy:0.9422 Error: 0.13361 Loss:0.14225 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.64s\n", - "1142048 Examples seen. Accuracy:0.9419 Error: 0.15342 Loss:0.12560 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.65s\n", - "1142688 Examples seen. Accuracy:0.9419 Error: 0.12563 Loss:0.09840 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.64s\n", - "1143328 Examples seen. Accuracy:0.9433 Error: 0.14517 Loss:0.11939 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.67s\n", - "1143968 Examples seen. Accuracy:0.9434 Error: 0.10243 Loss:0.07846 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.64s\n", - "1144608 Examples seen. 
Accuracy:0.9435 Error: 0.06028 Loss:0.03717 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.64s\n", - "1145248 Examples seen. Accuracy:0.9437 Error: 0.14712 Loss:0.11465 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.64s\n", - "1145888 Examples seen. Accuracy:0.9445 Error: 0.17405 Loss:0.22456 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.63s\n", - "1146528 Examples seen. Accuracy:0.9452 Error: 0.07290 Loss:0.07826 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.65s\n", - "1147168 Examples seen. Accuracy:0.9450 Error: 0.07994 Loss:0.07179 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.63s\n", - "Starting Validation.\n", - "Epochs: 23 Examples seen:1147792 Validation Accuracy: 0.9783 Validation Error: 0.0633 Validation Loss: 0.0684 Total time: 126.46min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 23. Working time: 2.11 hours.\n", - "1148432 Examples seen. Accuracy:0.9444 Error: 0.10659 Loss:0.08178 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.67s\n", - "1149072 Examples seen. Accuracy:0.9446 Error: 0.08228 Loss:0.05780 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.75s\n", - "1149712 Examples seen. Accuracy:0.9461 Error: 0.06611 Loss:0.04354 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.69s\n", - "1150352 Examples seen. Accuracy:0.9443 Error: 0.14121 Loss:0.09172 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.71s\n", - "1150992 Examples seen. Accuracy:0.9447 Error: 0.11143 Loss:0.10094 Threads: 8 Forward time: 5.01s Backward time: 3.29s Step time: 3.75s\n", - "1151632 Examples seen. Accuracy:0.9441 Error: 0.20640 Loss:0.18981 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.68s\n", - "1152272 Examples seen. Accuracy:0.9451 Error: 0.09881 Loss:0.06911 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 4.14s\n", - "1152912 Examples seen. 
Accuracy:0.9447 Error: 0.13251 Loss:0.15502 Threads: 8 Forward time: 5.09s Backward time: 3.34s Step time: 3.71s\n", - "1153552 Examples seen. Accuracy:0.9452 Error: 0.16591 Loss:0.12209 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.71s\n", - "1154192 Examples seen. Accuracy:0.9454 Error: 0.07522 Loss:0.05972 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 3.67s\n", - "1154832 Examples seen. Accuracy:0.9453 Error: 0.11190 Loss:0.10006 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.67s\n", - "1155472 Examples seen. Accuracy:0.9465 Error: 0.11870 Loss:0.14309 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.65s\n", - "1156112 Examples seen. Accuracy:0.9463 Error: 0.14962 Loss:0.15763 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.64s\n", - "1156752 Examples seen. Accuracy:0.9457 Error: 0.14971 Loss:0.17850 Threads: 8 Forward time: 5.06s Backward time: 3.29s Step time: 3.68s\n", - "1157392 Examples seen. Accuracy:0.9460 Error: 0.15542 Loss:0.18851 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.67s\n", - "1158032 Examples seen. Accuracy:0.9460 Error: 0.17875 Loss:0.16656 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.62s\n", - "1158672 Examples seen. Accuracy:0.9469 Error: 0.16663 Loss:0.16340 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.63s\n", - "1159312 Examples seen. Accuracy:0.9479 Error: 0.23977 Loss:0.25127 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.68s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1159952 Examples seen. Accuracy:0.9487 Error: 0.05452 Loss:0.04071 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.66s\n", - "1160592 Examples seen. Accuracy:0.9480 Error: 0.22770 Loss:0.28022 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.67s\n", - "1161232 Examples seen. 
Accuracy:0.9476 Error: 0.20605 Loss:0.20552 Threads: 8 Forward time: 4.99s Backward time: 3.31s Step time: 3.66s\n", - "1161872 Examples seen. Accuracy:0.9480 Error: 0.10632 Loss:0.07728 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.80s\n", - "1162512 Examples seen. Accuracy:0.9470 Error: 0.17529 Loss:0.17325 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.68s\n", - "1163152 Examples seen. Accuracy:0.9477 Error: 0.07620 Loss:0.04764 Threads: 8 Forward time: 4.96s Backward time: 3.32s Step time: 3.70s\n", - "1163792 Examples seen. Accuracy:0.9470 Error: 0.19919 Loss:0.23673 Threads: 8 Forward time: 5.14s Backward time: 3.35s Step time: 3.79s\n", - "1164432 Examples seen. Accuracy:0.9476 Error: 0.10805 Loss:0.10318 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.75s\n", - "1165072 Examples seen. Accuracy:0.9473 Error: 0.10546 Loss:0.11530 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.71s\n", - "1165712 Examples seen. Accuracy:0.9480 Error: 0.09670 Loss:0.07497 Threads: 8 Forward time: 5.03s Backward time: 3.28s Step time: 3.70s\n", - "1166352 Examples seen. Accuracy:0.9486 Error: 0.21044 Loss:0.22811 Threads: 8 Forward time: 4.98s Backward time: 3.29s Step time: 3.71s\n", - "1166992 Examples seen. Accuracy:0.9496 Error: 0.15267 Loss:0.19842 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 4.68s\n", - "1167632 Examples seen. Accuracy:0.9499 Error: 0.11576 Loss:0.09334 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 4.00s\n", - "1168272 Examples seen. Accuracy:0.9487 Error: 0.25070 Loss:0.26194 Threads: 8 Forward time: 5.02s Backward time: 3.29s Step time: 4.73s\n", - "1168912 Examples seen. Accuracy:0.9490 Error: 0.15279 Loss:0.13863 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.79s\n", - "1169552 Examples seen. 
Accuracy:0.9496 Error: 0.13206 Loss:0.15998 Threads: 8 Forward time: 5.04s Backward time: 3.30s Step time: 3.91s\n", - "1170192 Examples seen. Accuracy:0.9484 Error: 0.20045 Loss:0.28935 Threads: 8 Forward time: 5.04s Backward time: 3.28s Step time: 3.79s\n", - "1170832 Examples seen. Accuracy:0.9476 Error: 0.18495 Loss:0.19545 Threads: 8 Forward time: 4.97s Backward time: 3.32s Step time: 3.80s\n", - "1171472 Examples seen. Accuracy:0.9464 Error: 0.09976 Loss:0.08665 Threads: 8 Forward time: 5.01s Backward time: 3.31s Step time: 3.79s\n", - "1172112 Examples seen. Accuracy:0.9461 Error: 0.14884 Loss:0.14387 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.80s\n", - "1172752 Examples seen. Accuracy:0.9453 Error: 0.14433 Loss:0.12570 Threads: 8 Forward time: 5.07s Backward time: 3.30s Step time: 3.79s\n", - "1173392 Examples seen. Accuracy:0.9448 Error: 0.20144 Loss:0.20165 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.74s\n", - "1174032 Examples seen. Accuracy:0.9453 Error: 0.14749 Loss:0.13590 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.68s\n", - "1174672 Examples seen. Accuracy:0.9438 Error: 0.10591 Loss:0.07765 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.78s\n", - "1175312 Examples seen. Accuracy:0.9432 Error: 0.15394 Loss:0.14875 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.71s\n", - "1175952 Examples seen. Accuracy:0.9441 Error: 0.21772 Loss:0.19824 Threads: 8 Forward time: 5.00s Backward time: 3.31s Step time: 3.70s\n", - "1176592 Examples seen. Accuracy:0.9443 Error: 0.13137 Loss:0.09814 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.68s\n", - "1177232 Examples seen. Accuracy:0.9441 Error: 0.14829 Loss:0.16312 Threads: 8 Forward time: 5.06s Backward time: 3.29s Step time: 3.72s\n", - "1177872 Examples seen. 
Accuracy:0.9444 Error: 0.12328 Loss:0.11496 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.70s\n", - "1178512 Examples seen. Accuracy:0.9449 Error: 0.09082 Loss:0.05992 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.70s\n", - "1179152 Examples seen. Accuracy:0.9454 Error: 0.16169 Loss:0.15060 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.71s\n", - "1179792 Examples seen. Accuracy:0.9446 Error: 0.16590 Loss:0.20085 Threads: 8 Forward time: 5.03s Backward time: 3.32s Step time: 3.71s\n", - "1180432 Examples seen. Accuracy:0.9450 Error: 0.11239 Loss:0.07653 Threads: 8 Forward time: 4.99s Backward time: 3.35s Step time: 3.73s\n", - "1181072 Examples seen. Accuracy:0.9459 Error: 0.18015 Loss:0.14598 Threads: 8 Forward time: 4.98s Backward time: 3.32s Step time: 4.31s\n", - "1181712 Examples seen. Accuracy:0.9442 Error: 0.14710 Loss:0.13437 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.71s\n", - "1182352 Examples seen. Accuracy:0.9448 Error: 0.09371 Loss:0.06447 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.68s\n", - "1182992 Examples seen. Accuracy:0.9437 Error: 0.10926 Loss:0.14950 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.67s\n", - "1183632 Examples seen. Accuracy:0.9440 Error: 0.19048 Loss:0.14861 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.66s\n", - "1184272 Examples seen. Accuracy:0.9433 Error: 0.11949 Loss:0.13171 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.68s\n", - "1184912 Examples seen. Accuracy:0.9441 Error: 0.21216 Loss:0.18976 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.68s\n", - "1185552 Examples seen. Accuracy:0.9440 Error: 0.15406 Loss:0.12353 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.66s\n", - "1186192 Examples seen. 
Accuracy:0.9441 Error: 0.10416 Loss:0.09012 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.66s\n", - "1186832 Examples seen. Accuracy:0.9457 Error: 0.04256 Loss:0.02493 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.67s\n", - "1187472 Examples seen. Accuracy:0.9476 Error: 0.06884 Loss:0.04220 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.68s\n", - "1188112 Examples seen. Accuracy:0.9477 Error: 0.09049 Loss:0.19988 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.68s\n", - "1188752 Examples seen. Accuracy:0.9478 Error: 0.16043 Loss:0.21767 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.68s\n", - "1189392 Examples seen. Accuracy:0.9473 Error: 0.07894 Loss:0.06675 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.67s\n", - "1190032 Examples seen. Accuracy:0.9458 Error: 0.22327 Loss:0.21855 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.69s\n", - "1190672 Examples seen. Accuracy:0.9463 Error: 0.19136 Loss:0.43410 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.68s\n", - "1191312 Examples seen. Accuracy:0.9458 Error: 0.16086 Loss:0.14695 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.70s\n", - "1191952 Examples seen. Accuracy:0.9452 Error: 0.16623 Loss:0.13273 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.67s\n", - "1192592 Examples seen. Accuracy:0.9443 Error: 0.15744 Loss:0.19732 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 3.70s\n", - "1193232 Examples seen. Accuracy:0.9451 Error: 0.08387 Loss:0.10092 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.69s\n", - "1193872 Examples seen. Accuracy:0.9421 Error: 0.21001 Loss:0.25713 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.68s\n", - "1194512 Examples seen. 
Accuracy:0.9418 Error: 0.13565 Loss:0.19294 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.67s\n", - "1195152 Examples seen. Accuracy:0.9420 Error: 0.16291 Loss:0.19515 Threads: 8 Forward time: 4.99s Backward time: 3.30s Step time: 3.68s\n", - "1195792 Examples seen. Accuracy:0.9427 Error: 0.11184 Loss:0.10038 Threads: 8 Forward time: 4.93s Backward time: 3.30s Step time: 3.68s\n", - "1196432 Examples seen. Accuracy:0.9437 Error: 0.10837 Loss:0.11333 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.68s\n", - "1197072 Examples seen. Accuracy:0.9454 Error: 0.14414 Loss:0.13828 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.68s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 24 Examples seen:1197696 Validation Accuracy: 0.9804 Validation Error: 0.0610 Validation Loss: 0.0669 Total time: 131.81min\n", - "Layer 0 Max Output: 1.266 Min Output: -2.000 TNNetInput 128,128,3 Times: 0.00s 0.00s\n", - "Layer 1 Neurons: 64 Max Weight: 0.409 Min Weight: -0.325 Max Output: 5.613 Min Output: -5.392 TNNetConvolutionLinear 66,66,64 Times: 8.60s 0.40s Parent:0\n", - "Layer 2 Max Output: 5.613 Min Output: -3.228 TNNetMaxPool 33,33,64 Times: 3.59s 0.06s Parent:1\n", - "Layer 3 Neurons: 1 Max Weight: 0.605 Min Weight: 0.262 Max Output: 8.851 Min Output: -5.771 TNNetMovingStdNormalization 33,33,64 Times: 0.29s 0.00s Parent:2\n", - "Layer 4 Neurons: 64 Max Weight: 0.332 Min Weight: -0.205 Max Output: 10.548 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.90s 0.51s Parent:3\n", - "Layer 5 Neurons: 64 Max Weight: 0.346 Min Weight: -0.338 Max Output: 10.886 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.89s 0.18s Parent:4\n", - "Layer 6 Max Output: 10.886 Min Output: 0.000 TNNetMaxPool 17,17,64 Times: 0.50s 0.02s Parent:5\n", - "Layer 7 Neurons: 64 Max Weight: 0.377 Min Weight: -0.248 Max 
Output: 7.765 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.44s 0.09s Parent:6\n", - "Layer 8 Neurons: 64 Max Weight: 0.255 Min Weight: -0.235 Max Output: 5.471 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.46s 0.09s Parent:7\n", - "Layer 9 Neurons: 64 Max Weight: 0.235 Min Weight: -0.208 Max Output: 8.848 Min Output: 0.000 TNNetConvolutionReLU 9,9,64 Times: 0.48s 0.02s Parent:8\n", - "Layer 10 Max Output: 8.848 Min Output: 0.000 TNNetDropout 9,9,64 Times: 0.01s 0.00s Parent:9\n", - "Layer 11 Max Output: 8.848 Min Output: 0.000 TNNetMaxPool 5,5,64 Times: 0.05s 0.00s Parent:10\n", - "Layer 12 Neurons: 39 Max Weight: 0.385 Min Weight: -0.375 Max Output: 25.601 Min Output: -10.537 TNNetFullConnectLinear 39,1,1 Times: 0.04s 0.00s Parent:11\n", - "Layer 13 Max Output: 1.000 Min Output: 0.000 TNNetSoftMax 39,1,1 Times: 0.00s 0.00s Parent:12\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 24. Working time: 2.2 hours.\n", - "1198336 Examples seen. Accuracy:0.9468 Error: 0.26289 Loss:0.27406 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.81s\n", - "1198976 Examples seen. Accuracy:0.9477 Error: 0.15083 Loss:0.18740 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 4.45s\n", - "1199616 Examples seen. Accuracy:0.9490 Error: 0.14372 Loss:0.12510 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.63s\n", - "1200256 Examples seen. Accuracy:0.9485 Error: 0.08716 Loss:0.07109 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.65s\n", - "1200896 Examples seen. Accuracy:0.9488 Error: 0.10198 Loss:0.11925 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.60s\n", - "1201536 Examples seen. Accuracy:0.9476 Error: 0.06668 Loss:0.04838 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.61s\n", - "1202176 Examples seen. Accuracy:0.9473 Error: 0.08376 Loss:0.05120 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.65s\n", - "1202816 Examples seen. 
Accuracy:0.9459 Error: 0.26784 Loss:0.26125 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.63s\n", - "1203456 Examples seen. Accuracy:0.9460 Error: 0.09583 Loss:0.08170 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.64s\n", - "1204096 Examples seen. Accuracy:0.9433 Error: 0.15850 Loss:0.11102 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.64s\n", - "1204736 Examples seen. Accuracy:0.9440 Error: 0.12450 Loss:0.10567 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.65s\n", - "1205376 Examples seen. Accuracy:0.9444 Error: 0.09275 Loss:0.07461 Threads: 8 Forward time: 5.09s Backward time: 3.28s Step time: 3.65s\n", - "1206016 Examples seen. Accuracy:0.9435 Error: 0.09913 Loss:0.09007 Threads: 8 Forward time: 5.01s Backward time: 3.27s Step time: 4.32s\n", - "1206656 Examples seen. Accuracy:0.9436 Error: 0.16410 Loss:0.16115 Threads: 8 Forward time: 5.11s Backward time: 3.29s Step time: 3.75s\n", - "1207296 Examples seen. Accuracy:0.9432 Error: 0.21248 Loss:0.19766 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.64s\n", - "1207936 Examples seen. Accuracy:0.9436 Error: 0.14624 Loss:0.19549 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.61s\n", - "1208576 Examples seen. Accuracy:0.9435 Error: 0.22112 Loss:0.26161 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.60s\n", - "1209216 Examples seen. Accuracy:0.9435 Error: 0.22516 Loss:0.21452 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.60s\n", - "1209856 Examples seen. Accuracy:0.9450 Error: 0.13971 Loss:0.15646 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.61s\n", - "1210496 Examples seen. Accuracy:0.9453 Error: 0.10825 Loss:0.18735 Threads: 8 Forward time: 4.91s Backward time: 3.25s Step time: 3.60s\n", - "1211136 Examples seen. 
Accuracy:0.9451 Error: 0.18021 Loss:0.15731 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.63s\n", - "1211776 Examples seen. Accuracy:0.9446 Error: 0.15781 Loss:0.14680 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.62s\n", - "1212416 Examples seen. Accuracy:0.9463 Error: 0.08204 Loss:0.05657 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.62s\n", - "1213056 Examples seen. Accuracy:0.9462 Error: 0.08583 Loss:0.07119 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.63s\n", - "1213696 Examples seen. Accuracy:0.9461 Error: 0.08549 Loss:0.06467 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.63s\n", - "1214336 Examples seen. Accuracy:0.9450 Error: 0.14863 Loss:0.14249 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.62s\n", - "1214976 Examples seen. Accuracy:0.9445 Error: 0.12561 Loss:0.15898 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.63s\n", - "1215616 Examples seen. Accuracy:0.9435 Error: 0.15151 Loss:0.15308 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.66s\n", - "1216256 Examples seen. Accuracy:0.9450 Error: 0.08567 Loss:0.09853 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.68s\n", - "1216896 Examples seen. Accuracy:0.9448 Error: 0.08823 Loss:0.08829 Threads: 8 Forward time: 4.88s Backward time: 3.29s Step time: 3.61s\n", - "1217536 Examples seen. Accuracy:0.9437 Error: 0.11377 Loss:0.09614 Threads: 8 Forward time: 4.91s Backward time: 3.27s Step time: 3.62s\n", - "1218176 Examples seen. Accuracy:0.9448 Error: 0.15327 Loss:0.15363 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.61s\n", - "1218816 Examples seen. Accuracy:0.9433 Error: 0.15426 Loss:0.12756 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.61s\n", - "1219456 Examples seen. 
Accuracy:0.9436 Error: 0.10868 Loss:0.08253 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.62s\n", - "1220096 Examples seen. Accuracy:0.9447 Error: 0.17057 Loss:0.19539 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.62s\n", - "1220736 Examples seen. Accuracy:0.9441 Error: 0.11697 Loss:0.10937 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.63s\n", - "1221376 Examples seen. Accuracy:0.9428 Error: 0.16507 Loss:0.13780 Threads: 8 Forward time: 4.92s Backward time: 3.22s Step time: 3.61s\n", - "1222016 Examples seen. Accuracy:0.9426 Error: 0.15613 Loss:0.12696 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.62s\n", - "1222656 Examples seen. Accuracy:0.9438 Error: 0.15462 Loss:0.12980 Threads: 8 Forward time: 4.90s Backward time: 3.21s Step time: 3.60s\n", - "1223296 Examples seen. Accuracy:0.9418 Error: 0.27787 Loss:0.32166 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.69s\n", - "1223936 Examples seen. Accuracy:0.9426 Error: 0.21598 Loss:0.26784 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.64s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1224576 Examples seen. Accuracy:0.9438 Error: 0.10671 Loss:0.08176 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.63s\n", - "1225216 Examples seen. Accuracy:0.9444 Error: 0.24735 Loss:0.45198 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.62s\n", - "1225856 Examples seen. Accuracy:0.9439 Error: 0.13822 Loss:0.14409 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.62s\n", - "1226496 Examples seen. Accuracy:0.9445 Error: 0.16606 Loss:0.16985 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.62s\n", - "1227136 Examples seen. Accuracy:0.9449 Error: 0.15579 Loss:0.14052 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.66s\n", - "1227776 Examples seen. 
Accuracy:0.9455 Error: 0.15393 Loss:0.16454 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.68s\n", - "1228416 Examples seen. Accuracy:0.9464 Error: 0.13151 Loss:0.09741 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.66s\n", - "1229056 Examples seen. Accuracy:0.9470 Error: 0.14473 Loss:0.14964 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.65s\n", - "1229696 Examples seen. Accuracy:0.9470 Error: 0.10573 Loss:0.09065 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.66s\n", - "1230336 Examples seen. Accuracy:0.9473 Error: 0.17484 Loss:0.16137 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.66s\n", - "1230976 Examples seen. Accuracy:0.9456 Error: 0.18295 Loss:0.16258 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.65s\n", - "1231616 Examples seen. Accuracy:0.9466 Error: 0.11990 Loss:0.09036 Threads: 8 Forward time: 4.90s Backward time: 3.24s Step time: 3.65s\n", - "1232256 Examples seen. Accuracy:0.9471 Error: 0.19462 Loss:0.16939 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.67s\n", - "1232896 Examples seen. Accuracy:0.9461 Error: 0.13283 Loss:0.12480 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.66s\n", - "1233536 Examples seen. Accuracy:0.9451 Error: 0.18658 Loss:0.20118 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.67s\n", - "1234176 Examples seen. Accuracy:0.9460 Error: 0.12775 Loss:0.10271 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.68s\n", - "1234816 Examples seen. Accuracy:0.9472 Error: 0.15311 Loss:0.13163 Threads: 8 Forward time: 5.04s Backward time: 3.27s Step time: 3.67s\n", - "1235456 Examples seen. Accuracy:0.9478 Error: 0.12933 Loss:0.11709 Threads: 8 Forward time: 5.04s Backward time: 3.28s Step time: 3.69s\n", - "1236096 Examples seen. 
Accuracy:0.9479 Error: 0.27455 Loss:0.31081 Threads: 8 Forward time: 4.96s Backward time: 3.29s Step time: 4.16s\n", - "1236736 Examples seen. Accuracy:0.9474 Error: 0.13430 Loss:0.11347 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.64s\n", - "1237376 Examples seen. Accuracy:0.9478 Error: 0.14763 Loss:0.11102 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.71s\n", - "1238016 Examples seen. Accuracy:0.9500 Error: 0.16345 Loss:0.16945 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "1238656 Examples seen. Accuracy:0.9497 Error: 0.11825 Loss:0.09453 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.64s\n", - "1239296 Examples seen. Accuracy:0.9492 Error: 0.14777 Loss:0.10634 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.65s\n", - "1239936 Examples seen. Accuracy:0.9488 Error: 0.06227 Loss:0.06355 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.67s\n", - "1240576 Examples seen. Accuracy:0.9479 Error: 0.06274 Loss:0.03663 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.62s\n", - "1241216 Examples seen. Accuracy:0.9473 Error: 0.14335 Loss:0.15098 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.62s\n", - "1241856 Examples seen. Accuracy:0.9465 Error: 0.18920 Loss:0.23447 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.64s\n", - "1242496 Examples seen. Accuracy:0.9465 Error: 0.26508 Loss:0.32459 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.64s\n", - "1243136 Examples seen. Accuracy:0.9469 Error: 0.17908 Loss:0.16225 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.64s\n", - "1243776 Examples seen. Accuracy:0.9479 Error: 0.12929 Loss:0.10541 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.70s\n", - "1244416 Examples seen. 
Accuracy:0.9484 Error: 0.13879 Loss:0.13635 Threads: 8 Forward time: 5.07s Backward time: 3.32s Step time: 3.67s\n", - "1245056 Examples seen. Accuracy:0.9479 Error: 0.22548 Loss:0.26002 Threads: 8 Forward time: 4.95s Backward time: 3.29s Step time: 3.65s\n", - "1245696 Examples seen. Accuracy:0.9481 Error: 0.11780 Loss:0.08024 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.62s\n", - "1246336 Examples seen. Accuracy:0.9487 Error: 0.13710 Loss:0.15133 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.63s\n", - "1246976 Examples seen. Accuracy:0.9501 Error: 0.15814 Loss:0.16433 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.63s\n", - "Starting Validation.\n", - "Epochs: 25 Examples seen:1247600 Validation Accuracy: 0.9801 Validation Error: 0.0592 Validation Loss: 0.0645 Total time: 137.05min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 25. Working time: 2.28 hours.\n", - "1248240 Examples seen. Accuracy:0.9493 Error: 0.19384 Loss:0.27351 Threads: 8 Forward time: 4.97s Backward time: 3.27s Step time: 3.66s\n", - "1248880 Examples seen. Accuracy:0.9504 Error: 0.16422 Loss:0.12801 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.64s\n", - "1249520 Examples seen. Accuracy:0.9513 Error: 0.14088 Loss:0.22432 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.63s\n", - "1250160 Examples seen. Accuracy:0.9514 Error: 0.16068 Loss:0.20091 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.65s\n", - "1250800 Examples seen. Accuracy:0.9509 Error: 0.07353 Loss:0.06080 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.65s\n", - "1251440 Examples seen. Accuracy:0.9494 Error: 0.11821 Loss:0.13711 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.65s\n", - "1252080 Examples seen. Accuracy:0.9483 Error: 0.13161 Loss:0.10466 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.65s\n", - "1252720 Examples seen. 
Accuracy:0.9478 Error: 0.19342 Loss:0.29328 Threads: 8 Forward time: 5.04s Backward time: 3.27s Step time: 3.69s\n", - "1253360 Examples seen. Accuracy:0.9468 Error: 0.26612 Loss:0.23271 Threads: 8 Forward time: 4.94s Backward time: 3.28s Step time: 3.68s\n", - "1254000 Examples seen. Accuracy:0.9451 Error: 0.25302 Loss:0.25177 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.65s\n", - "1254640 Examples seen. Accuracy:0.9447 Error: 0.14129 Loss:0.09581 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.65s\n", - "1255280 Examples seen. Accuracy:0.9439 Error: 0.12214 Loss:0.09685 Threads: 8 Forward time: 4.95s Backward time: 3.29s Step time: 3.64s\n", - "1255920 Examples seen. Accuracy:0.9436 Error: 0.15051 Loss:0.16222 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.61s\n", - "1256560 Examples seen. Accuracy:0.9444 Error: 0.18626 Loss:0.22004 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.65s\n", - "1257200 Examples seen. Accuracy:0.9443 Error: 0.14887 Loss:0.24160 Threads: 8 Forward time: 5.06s Backward time: 3.29s Step time: 3.63s\n", - "1257840 Examples seen. Accuracy:0.9454 Error: 0.10066 Loss:0.06353 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.65s\n", - "1258480 Examples seen. Accuracy:0.9453 Error: 0.13658 Loss:0.10336 Threads: 8 Forward time: 5.04s Backward time: 3.29s Step time: 3.65s\n", - "1259120 Examples seen. Accuracy:0.9463 Error: 0.18036 Loss:0.16815 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.65s\n", - "1259760 Examples seen. Accuracy:0.9455 Error: 0.16396 Loss:0.22196 Threads: 8 Forward time: 5.04s Backward time: 3.30s Step time: 3.65s\n", - "1260400 Examples seen. Accuracy:0.9452 Error: 0.19549 Loss:0.23918 Threads: 8 Forward time: 5.05s Backward time: 3.24s Step time: 3.65s\n", - "1261040 Examples seen. 
Accuracy:0.9441 Error: 0.12265 Loss:0.08074 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 4.30s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1261680 Examples seen. Accuracy:0.9429 Error: 0.18740 Loss:0.21537 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.73s\n", - "1262320 Examples seen. Accuracy:0.9433 Error: 0.15901 Loss:0.17846 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.67s\n", - "1262960 Examples seen. Accuracy:0.9442 Error: 0.07459 Loss:0.05525 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.68s\n", - "1263600 Examples seen. Accuracy:0.9456 Error: 0.13322 Loss:0.10159 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.75s\n", - "1264240 Examples seen. Accuracy:0.9446 Error: 0.20158 Loss:0.30780 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.68s\n", - "1264880 Examples seen. Accuracy:0.9439 Error: 0.10327 Loss:0.08684 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.69s\n", - "1265520 Examples seen. Accuracy:0.9444 Error: 0.12762 Loss:0.10777 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.69s\n", - "1266160 Examples seen. Accuracy:0.9442 Error: 0.24294 Loss:0.27773 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.70s\n", - "1266800 Examples seen. Accuracy:0.9443 Error: 0.11615 Loss:0.09435 Threads: 8 Forward time: 5.17s Backward time: 3.37s Step time: 3.74s\n", - "1267440 Examples seen. Accuracy:0.9447 Error: 0.05781 Loss:0.03715 Threads: 8 Forward time: 4.92s Backward time: 3.27s Step time: 3.70s\n", - "1268080 Examples seen. Accuracy:0.9441 Error: 0.10428 Loss:0.08833 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.68s\n", - "1268720 Examples seen. Accuracy:0.9439 Error: 0.10423 Loss:0.06373 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.67s\n", - "1269360 Examples seen. 
Accuracy:0.9434 Error: 0.15460 Loss:0.14539 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.69s\n", - "1270000 Examples seen. Accuracy:0.9427 Error: 0.18121 Loss:0.15550 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.71s\n", - "1270640 Examples seen. Accuracy:0.9435 Error: 0.06787 Loss:0.04183 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.74s\n", - "1271280 Examples seen. Accuracy:0.9446 Error: 0.10047 Loss:0.07592 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.67s\n", - "1271920 Examples seen. Accuracy:0.9452 Error: 0.20400 Loss:0.19751 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.72s\n", - "1272560 Examples seen. Accuracy:0.9471 Error: 0.15276 Loss:0.15829 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.70s\n", - "1273200 Examples seen. Accuracy:0.9468 Error: 0.15180 Loss:0.14723 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.71s\n", - "1273840 Examples seen. Accuracy:0.9453 Error: 0.22588 Loss:0.46818 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.69s\n", - "1274480 Examples seen. Accuracy:0.9452 Error: 0.10178 Loss:0.07394 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.70s\n", - "1275120 Examples seen. Accuracy:0.9444 Error: 0.18727 Loss:0.14680 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.68s\n", - "1275760 Examples seen. Accuracy:0.9449 Error: 0.16739 Loss:0.15439 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.69s\n", - "1276400 Examples seen. Accuracy:0.9441 Error: 0.18841 Loss:0.22804 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.72s\n", - "1277040 Examples seen. Accuracy:0.9448 Error: 0.09128 Loss:0.06777 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.72s\n", - "1277680 Examples seen. 
Accuracy:0.9452 Error: 0.10307 Loss:0.06522 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.69s\n", - "1278320 Examples seen. Accuracy:0.9450 Error: 0.15240 Loss:0.10682 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.69s\n", - "1278960 Examples seen. Accuracy:0.9451 Error: 0.11009 Loss:0.08482 Threads: 8 Forward time: 5.01s Backward time: 3.31s Step time: 3.71s\n", - "1279600 Examples seen. Accuracy:0.9449 Error: 0.09153 Loss:0.08361 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.70s\n", - "1280240 Examples seen. Accuracy:0.9456 Error: 0.13692 Loss:0.13277 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.68s\n", - "1280880 Examples seen. Accuracy:0.9467 Error: 0.09231 Loss:0.06253 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.71s\n", - "1281520 Examples seen. Accuracy:0.9477 Error: 0.10891 Loss:0.07700 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.70s\n", - "1282160 Examples seen. Accuracy:0.9466 Error: 0.10718 Loss:0.18481 Threads: 8 Forward time: 5.05s Backward time: 3.30s Step time: 3.71s\n", - "1282800 Examples seen. Accuracy:0.9466 Error: 0.16089 Loss:0.23117 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.73s\n", - "1283440 Examples seen. Accuracy:0.9464 Error: 0.08310 Loss:0.06702 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.71s\n", - "1284080 Examples seen. Accuracy:0.9455 Error: 0.08818 Loss:0.05569 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.72s\n", - "1284720 Examples seen. Accuracy:0.9457 Error: 0.04130 Loss:0.02248 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.71s\n", - "1285360 Examples seen. Accuracy:0.9461 Error: 0.06465 Loss:0.04299 Threads: 8 Forward time: 5.07s Backward time: 3.32s Step time: 3.77s\n", - "1286000 Examples seen. 
Accuracy:0.9472 Error: 0.10965 Loss:0.09964 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.77s\n", - "1286640 Examples seen. Accuracy:0.9489 Error: 0.19077 Loss:0.15463 Threads: 8 Forward time: 5.13s Backward time: 3.33s Step time: 3.75s\n", - "1287280 Examples seen. Accuracy:0.9477 Error: 0.26201 Loss:0.36590 Threads: 8 Forward time: 5.04s Backward time: 3.29s Step time: 3.74s\n", - "1287920 Examples seen. Accuracy:0.9470 Error: 0.20909 Loss:0.19749 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.79s\n", - "1288560 Examples seen. Accuracy:0.9466 Error: 0.16427 Loss:0.17279 Threads: 8 Forward time: 5.05s Backward time: 3.24s Step time: 3.81s\n", - "1289200 Examples seen. Accuracy:0.9469 Error: 0.13868 Loss:0.14798 Threads: 8 Forward time: 5.06s Backward time: 3.25s Step time: 3.72s\n", - "1289840 Examples seen. Accuracy:0.9455 Error: 0.16524 Loss:0.20594 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.72s\n", - "1290480 Examples seen. Accuracy:0.9474 Error: 0.10920 Loss:0.09763 Threads: 8 Forward time: 5.08s Backward time: 3.27s Step time: 3.76s\n", - "1291120 Examples seen. Accuracy:0.9475 Error: 0.17874 Loss:0.12385 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 4.22s\n", - "1291760 Examples seen. Accuracy:0.9467 Error: 0.17514 Loss:0.18256 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.65s\n", - "1292400 Examples seen. Accuracy:0.9453 Error: 0.23528 Loss:0.23100 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.64s\n", - "1293040 Examples seen. Accuracy:0.9456 Error: 0.25035 Loss:0.23062 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.70s\n", - "1293680 Examples seen. Accuracy:0.9451 Error: 0.17332 Loss:0.24107 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.73s\n", - "1294320 Examples seen. 
Accuracy:0.9463 Error: 0.10139 Loss:0.11800 Threads: 8 Forward time: 4.93s Backward time: 3.31s Step time: 3.67s\n", - "1294960 Examples seen. Accuracy:0.9474 Error: 0.14364 Loss:0.10782 Threads: 8 Forward time: 5.01s Backward time: 3.33s Step time: 3.75s\n", - "1295600 Examples seen. Accuracy:0.9475 Error: 0.10474 Loss:0.07952 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.66s\n", - "1296240 Examples seen. Accuracy:0.9463 Error: 0.20403 Loss:0.24117 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.73s\n", - "1296880 Examples seen. Accuracy:0.9453 Error: 0.15086 Loss:0.14591 Threads: 8 Forward time: 4.89s Backward time: 3.27s Step time: 3.63s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 26 Examples seen:1297504 Validation Accuracy: 0.9815 Validation Error: 0.0580 Validation Loss: 0.0641 Total time: 142.35min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 26. Working time: 2.37 hours.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1298144 Examples seen. Accuracy:0.9446 Error: 0.21471 Loss:0.22340 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.66s\n", - "1298784 Examples seen. Accuracy:0.9432 Error: 0.32632 Loss:0.38966 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.64s\n", - "1299424 Examples seen. Accuracy:0.9423 Error: 0.17222 Loss:0.11502 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.64s\n", - "1300064 Examples seen. Accuracy:0.9436 Error: 0.06403 Loss:0.03990 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.64s\n", - "1300704 Examples seen. Accuracy:0.9421 Error: 0.25386 Loss:0.38098 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.66s\n", - "1301344 Examples seen. Accuracy:0.9437 Error: 0.19014 Loss:0.19647 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.69s\n", - "1301984 Examples seen. 
Accuracy:0.9435 Error: 0.12837 Loss:0.09401 Threads: 8 Forward time: 4.90s Backward time: 3.24s Step time: 3.64s\n", - "1302624 Examples seen. Accuracy:0.9434 Error: 0.12868 Loss:0.15914 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.63s\n", - "1303264 Examples seen. Accuracy:0.9434 Error: 0.18031 Loss:0.17309 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.63s\n", - "1303904 Examples seen. Accuracy:0.9430 Error: 0.16687 Loss:0.21385 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.63s\n", - "1304544 Examples seen. Accuracy:0.9435 Error: 0.13974 Loss:0.13453 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.64s\n", - "1305184 Examples seen. Accuracy:0.9444 Error: 0.13430 Loss:0.10248 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.65s\n", - "1305824 Examples seen. Accuracy:0.9462 Error: 0.06340 Loss:0.03965 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.65s\n", - "1306464 Examples seen. Accuracy:0.9460 Error: 0.07035 Loss:0.06770 Threads: 8 Forward time: 4.95s Backward time: 3.28s Step time: 3.64s\n", - "1307104 Examples seen. Accuracy:0.9454 Error: 0.12145 Loss:0.11008 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.66s\n", - "1307744 Examples seen. Accuracy:0.9443 Error: 0.16507 Loss:0.16787 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.69s\n", - "1308384 Examples seen. Accuracy:0.9448 Error: 0.17914 Loss:0.18987 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.67s\n", - "1309024 Examples seen. Accuracy:0.9444 Error: 0.15677 Loss:0.18336 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.65s\n", - "1309664 Examples seen. Accuracy:0.9425 Error: 0.21995 Loss:0.23747 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.65s\n", - "1310304 Examples seen. 
Accuracy:0.9430 Error: 0.16408 Loss:0.15231 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.66s\n", - "1310944 Examples seen. Accuracy:0.9431 Error: 0.12083 Loss:0.12240 Threads: 8 Forward time: 4.93s Backward time: 3.21s Step time: 3.69s\n", - "1311584 Examples seen. Accuracy:0.9441 Error: 0.20318 Loss:0.22036 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.64s\n", - "1312224 Examples seen. Accuracy:0.9442 Error: 0.20686 Loss:0.26510 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.67s\n", - "1312864 Examples seen. Accuracy:0.9438 Error: 0.06887 Loss:0.04485 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.65s\n", - "1313504 Examples seen. Accuracy:0.9436 Error: 0.06161 Loss:0.08989 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.66s\n", - "1314144 Examples seen. Accuracy:0.9430 Error: 0.10659 Loss:0.08089 Threads: 8 Forward time: 5.01s Backward time: 3.30s Step time: 3.67s\n", - "1314784 Examples seen. Accuracy:0.9438 Error: 0.12844 Loss:0.16150 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.68s\n", - "1315424 Examples seen. Accuracy:0.9429 Error: 0.09915 Loss:0.24830 Threads: 8 Forward time: 5.01s Backward time: 3.27s Step time: 3.66s\n", - "1316064 Examples seen. Accuracy:0.9427 Error: 0.17798 Loss:0.18002 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 4.26s\n", - "1316704 Examples seen. Accuracy:0.9432 Error: 0.10833 Loss:0.07414 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.67s\n", - "1317344 Examples seen. Accuracy:0.9427 Error: 0.10232 Loss:0.07334 Threads: 8 Forward time: 5.01s Backward time: 3.19s Step time: 3.70s\n", - "1317984 Examples seen. Accuracy:0.9431 Error: 0.17990 Loss:0.16969 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.67s\n", - "1318624 Examples seen. 
Accuracy:0.9424 Error: 0.14679 Loss:0.18408 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.65s\n", - "1319264 Examples seen. Accuracy:0.9435 Error: 0.17855 Loss:0.23966 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.67s\n", - "1319904 Examples seen. Accuracy:0.9427 Error: 0.07152 Loss:0.04362 Threads: 8 Forward time: 4.91s Backward time: 3.21s Step time: 3.68s\n", - "1320544 Examples seen. Accuracy:0.9438 Error: 0.08435 Loss:0.11088 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.68s\n", - "1321184 Examples seen. Accuracy:0.9440 Error: 0.08809 Loss:0.15140 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.72s\n", - "1321824 Examples seen. Accuracy:0.9433 Error: 0.15179 Loss:0.10814 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.65s\n", - "1322464 Examples seen. Accuracy:0.9432 Error: 0.06831 Loss:0.04797 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.72s\n", - "1323104 Examples seen. Accuracy:0.9425 Error: 0.20406 Loss:0.26747 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.62s\n", - "1323744 Examples seen. Accuracy:0.9442 Error: 0.09283 Loss:0.09767 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.67s\n", - "1324384 Examples seen. Accuracy:0.9441 Error: 0.15506 Loss:0.17069 Threads: 8 Forward time: 4.91s Backward time: 3.25s Step time: 3.63s\n", - "1325024 Examples seen. Accuracy:0.9442 Error: 0.11763 Loss:0.18047 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.66s\n", - "1325664 Examples seen. Accuracy:0.9445 Error: 0.18907 Loss:0.15231 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.64s\n", - "1326304 Examples seen. Accuracy:0.9443 Error: 0.22853 Loss:0.24156 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "1326944 Examples seen. 
Accuracy:0.9451 Error: 0.09871 Loss:0.08006 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.66s\n", - "1327584 Examples seen. Accuracy:0.9453 Error: 0.17748 Loss:0.16216 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "1328224 Examples seen. Accuracy:0.9455 Error: 0.14607 Loss:0.13312 Threads: 8 Forward time: 5.16s Backward time: 3.37s Step time: 4.33s\n", - "1328864 Examples seen. Accuracy:0.9457 Error: 0.21569 Loss:0.18745 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 4.59s\n", - "1329504 Examples seen. Accuracy:0.9466 Error: 0.19779 Loss:0.33058 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.69s\n", - "1330144 Examples seen. Accuracy:0.9474 Error: 0.18095 Loss:0.21181 Threads: 8 Forward time: 4.95s Backward time: 3.19s Step time: 3.65s\n", - "1330784 Examples seen. Accuracy:0.9472 Error: 0.10384 Loss:0.09726 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.70s\n", - "1331424 Examples seen. Accuracy:0.9459 Error: 0.14583 Loss:0.15175 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.69s\n", - "1332064 Examples seen. Accuracy:0.9461 Error: 0.08800 Loss:0.08541 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.72s\n", - "1332704 Examples seen. Accuracy:0.9463 Error: 0.17635 Loss:0.18454 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.73s\n", - "1333344 Examples seen. Accuracy:0.9469 Error: 0.08177 Loss:0.05372 Threads: 8 Forward time: 4.93s Backward time: 3.20s Step time: 3.77s\n", - "1333984 Examples seen. Accuracy:0.9468 Error: 0.09804 Loss:0.10768 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.67s\n", - "1334624 Examples seen. Accuracy:0.9465 Error: 0.06988 Loss:0.04379 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.67s\n", - "1335264 Examples seen. 
Accuracy:0.9475 Error: 0.21247 Loss:0.25398 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.70s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1335904 Examples seen. Accuracy:0.9469 Error: 0.28866 Loss:0.31043 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.73s\n", - "1336544 Examples seen. Accuracy:0.9457 Error: 0.10323 Loss:0.09532 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.72s\n", - "1337184 Examples seen. Accuracy:0.9445 Error: 0.12273 Loss:0.09408 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.69s\n", - "1337824 Examples seen. Accuracy:0.9440 Error: 0.07332 Loss:0.05458 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.73s\n", - "1338464 Examples seen. Accuracy:0.9445 Error: 0.12766 Loss:0.13382 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.73s\n", - "1339104 Examples seen. Accuracy:0.9440 Error: 0.12421 Loss:0.10450 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.71s\n", - "1339744 Examples seen. Accuracy:0.9439 Error: 0.16706 Loss:0.13370 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.71s\n", - "1340384 Examples seen. Accuracy:0.9418 Error: 0.11334 Loss:0.17669 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.68s\n", - "1341024 Examples seen. Accuracy:0.9425 Error: 0.18791 Loss:0.17847 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.72s\n", - "1341664 Examples seen. Accuracy:0.9428 Error: 0.17789 Loss:0.15379 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.71s\n", - "1342304 Examples seen. Accuracy:0.9439 Error: 0.12433 Loss:0.07759 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.71s\n", - "1342944 Examples seen. Accuracy:0.9446 Error: 0.08810 Loss:0.05920 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.74s\n", - "1343584 Examples seen. 
Accuracy:0.9461 Error: 0.05596 Loss:0.04700 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.73s\n", - "1344224 Examples seen. Accuracy:0.9465 Error: 0.05306 Loss:0.04473 Threads: 8 Forward time: 4.99s Backward time: 3.29s Step time: 3.71s\n", - "1344864 Examples seen. Accuracy:0.9470 Error: 0.16632 Loss:0.15710 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.74s\n", - "1345504 Examples seen. Accuracy:0.9480 Error: 0.10568 Loss:0.10736 Threads: 8 Forward time: 5.00s Backward time: 3.29s Step time: 4.26s\n", - "1346144 Examples seen. Accuracy:0.9491 Error: 0.08046 Loss:0.05307 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.66s\n", - "1346784 Examples seen. Accuracy:0.9491 Error: 0.10946 Loss:0.08960 Threads: 8 Forward time: 4.90s Backward time: 3.22s Step time: 3.64s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 27 Examples seen:1347408 Validation Accuracy: 0.9826 Validation Error: 0.0565 Validation Loss: 0.0631 Total time: 147.65min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 27. Working time: 2.46 hours.\n", - "1348048 Examples seen. Accuracy:0.9509 Error: 0.10121 Loss:0.10480 Threads: 8 Forward time: 5.04s Backward time: 3.27s Step time: 3.83s\n", - "1348688 Examples seen. Accuracy:0.9502 Error: 0.13786 Loss:0.12307 Threads: 8 Forward time: 4.89s Backward time: 3.23s Step time: 3.63s\n", - "1349328 Examples seen. Accuracy:0.9502 Error: 0.10861 Loss:0.11271 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.63s\n", - "1349968 Examples seen. Accuracy:0.9512 Error: 0.16366 Loss:0.15103 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.65s\n", - "1350608 Examples seen. Accuracy:0.9519 Error: 0.08901 Loss:0.08968 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.64s\n", - "1351248 Examples seen. 
Accuracy:0.9507 Error: 0.08075 Loss:0.06334 Threads: 8 Forward time: 5.03s Backward time: 3.31s Step time: 3.67s\n", - "1351888 Examples seen. Accuracy:0.9489 Error: 0.23113 Loss:0.33942 Threads: 8 Forward time: 5.11s Backward time: 3.26s Step time: 3.68s\n", - "1352528 Examples seen. Accuracy:0.9493 Error: 0.10107 Loss:0.14827 Threads: 8 Forward time: 4.92s Backward time: 3.22s Step time: 3.63s\n", - "1353168 Examples seen. Accuracy:0.9500 Error: 0.15232 Loss:0.19826 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.66s\n", - "1353808 Examples seen. Accuracy:0.9490 Error: 0.06446 Loss:0.08063 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.62s\n", - "1354448 Examples seen. Accuracy:0.9506 Error: 0.09203 Loss:0.05924 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.59s\n", - "1355088 Examples seen. Accuracy:0.9495 Error: 0.01877 Loss:0.00969 Threads: 8 Forward time: 4.91s Backward time: 3.21s Step time: 3.58s\n", - "1355728 Examples seen. Accuracy:0.9483 Error: 0.18560 Loss:0.19859 Threads: 8 Forward time: 4.92s Backward time: 3.20s Step time: 3.60s\n", - "1356368 Examples seen. Accuracy:0.9479 Error: 0.16790 Loss:0.18150 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.61s\n", - "1357008 Examples seen. Accuracy:0.9484 Error: 0.15205 Loss:0.13063 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.62s\n", - "1357648 Examples seen. Accuracy:0.9464 Error: 0.15391 Loss:0.17763 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "1358288 Examples seen. Accuracy:0.9462 Error: 0.14777 Loss:0.18169 Threads: 8 Forward time: 4.93s Backward time: 3.21s Step time: 3.61s\n", - "1358928 Examples seen. Accuracy:0.9464 Error: 0.15986 Loss:0.15231 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.59s\n", - "1359568 Examples seen. 
Accuracy:0.9470 Error: 0.09436 Loss:0.12597 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.59s\n", - "1360208 Examples seen. Accuracy:0.9468 Error: 0.17975 Loss:0.14399 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.59s\n", - "1360848 Examples seen. Accuracy:0.9472 Error: 0.08552 Loss:0.06134 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.59s\n", - "1361488 Examples seen. Accuracy:0.9472 Error: 0.18226 Loss:0.25142 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.68s\n", - "1362128 Examples seen. Accuracy:0.9486 Error: 0.09666 Loss:0.08678 Threads: 8 Forward time: 5.14s Backward time: 3.27s Step time: 3.66s\n", - "1362768 Examples seen. Accuracy:0.9497 Error: 0.13249 Loss:0.10215 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.68s\n", - "1363408 Examples seen. Accuracy:0.9482 Error: 0.08814 Loss:0.10143 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.64s\n", - "1364048 Examples seen. Accuracy:0.9471 Error: 0.22412 Loss:0.25013 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.74s\n", - "1364688 Examples seen. Accuracy:0.9472 Error: 0.14540 Loss:0.16838 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.72s\n", - "1365328 Examples seen. Accuracy:0.9473 Error: 0.09956 Loss:0.06736 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.67s\n", - "1365968 Examples seen. Accuracy:0.9475 Error: 0.09321 Loss:0.10851 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "1366608 Examples seen. Accuracy:0.9488 Error: 0.04632 Loss:0.02714 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.67s\n", - "1367248 Examples seen. Accuracy:0.9468 Error: 0.11566 Loss:0.09503 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.65s\n", - "1367888 Examples seen. 
Accuracy:0.9492 Error: 0.10286 Loss:0.08778 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.66s\n", - "1368528 Examples seen. Accuracy:0.9494 Error: 0.09261 Loss:0.16489 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.66s\n", - "1369168 Examples seen. Accuracy:0.9497 Error: 0.10361 Loss:0.08744 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.66s\n", - "1369808 Examples seen. Accuracy:0.9503 Error: 0.05990 Loss:0.07499 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.65s\n", - "1370448 Examples seen. Accuracy:0.9493 Error: 0.19406 Loss:0.33821 Threads: 8 Forward time: 5.16s Backward time: 3.34s Step time: 4.19s\n", - "1371088 Examples seen. Accuracy:0.9496 Error: 0.16123 Loss:0.14536 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.62s\n", - "1371728 Examples seen. Accuracy:0.9494 Error: 0.14936 Loss:0.16657 Threads: 8 Forward time: 4.90s Backward time: 3.27s Step time: 3.60s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1372368 Examples seen. Accuracy:0.9496 Error: 0.11550 Loss:0.09744 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.61s\n", - "1373008 Examples seen. Accuracy:0.9500 Error: 0.13668 Loss:0.13997 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.60s\n", - "1373648 Examples seen. Accuracy:0.9494 Error: 0.13516 Loss:0.15512 Threads: 8 Forward time: 4.91s Backward time: 3.22s Step time: 3.63s\n", - "1374288 Examples seen. Accuracy:0.9494 Error: 0.13733 Loss:0.17051 Threads: 8 Forward time: 4.91s Backward time: 3.30s Step time: 3.60s\n", - "1374928 Examples seen. Accuracy:0.9490 Error: 0.13899 Loss:0.12188 Threads: 8 Forward time: 4.92s Backward time: 3.26s Step time: 3.60s\n", - "1375568 Examples seen. Accuracy:0.9481 Error: 0.14727 Loss:0.15195 Threads: 8 Forward time: 4.91s Backward time: 3.30s Step time: 3.62s\n", - "1376208 Examples seen. 
Accuracy:0.9463 Error: 0.21523 Loss:0.21157 Threads: 8 Forward time: 4.94s Backward time: 3.29s Step time: 3.64s\n", - "1376848 Examples seen. Accuracy:0.9460 Error: 0.11469 Loss:0.11323 Threads: 8 Forward time: 5.06s Backward time: 3.31s Step time: 3.63s\n", - "1377488 Examples seen. Accuracy:0.9453 Error: 0.11389 Loss:0.10801 Threads: 8 Forward time: 4.90s Backward time: 3.25s Step time: 3.61s\n", - "1378128 Examples seen. Accuracy:0.9443 Error: 0.17951 Loss:0.18461 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.61s\n", - "1378768 Examples seen. Accuracy:0.9446 Error: 0.11869 Loss:0.10371 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.64s\n", - "1379408 Examples seen. Accuracy:0.9449 Error: 0.13683 Loss:0.10894 Threads: 8 Forward time: 4.91s Backward time: 3.24s Step time: 3.61s\n", - "1380048 Examples seen. Accuracy:0.9462 Error: 0.18831 Loss:0.17881 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.59s\n", - "1380688 Examples seen. Accuracy:0.9457 Error: 0.14097 Loss:0.22286 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.62s\n", - "1381328 Examples seen. Accuracy:0.9456 Error: 0.14346 Loss:0.13224 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.61s\n", - "1381968 Examples seen. Accuracy:0.9474 Error: 0.12830 Loss:0.09888 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.61s\n", - "1382608 Examples seen. Accuracy:0.9472 Error: 0.31545 Loss:0.38050 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.61s\n", - "1383248 Examples seen. Accuracy:0.9477 Error: 0.08129 Loss:0.05224 Threads: 8 Forward time: 4.90s Backward time: 3.26s Step time: 3.62s\n", - "1383888 Examples seen. Accuracy:0.9474 Error: 0.11140 Loss:0.08422 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.61s\n", - "1384528 Examples seen. 
Accuracy:0.9464 Error: 0.13676 Loss:0.10637 Threads: 8 Forward time: 4.90s Backward time: 3.28s Step time: 3.61s\n", - "1385168 Examples seen. Accuracy:0.9463 Error: 0.07621 Loss:0.07987 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.60s\n", - "1385808 Examples seen. Accuracy:0.9484 Error: 0.09249 Loss:0.07141 Threads: 8 Forward time: 4.91s Backward time: 3.24s Step time: 3.61s\n", - "1386448 Examples seen. Accuracy:0.9484 Error: 0.21104 Loss:0.25385 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.60s\n", - "1387088 Examples seen. Accuracy:0.9495 Error: 0.12764 Loss:0.14795 Threads: 8 Forward time: 4.91s Backward time: 3.26s Step time: 3.60s\n", - "1387728 Examples seen. Accuracy:0.9509 Error: 0.11365 Loss:0.09538 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.61s\n", - "1388368 Examples seen. Accuracy:0.9513 Error: 0.09333 Loss:0.14052 Threads: 8 Forward time: 4.91s Backward time: 3.19s Step time: 3.60s\n", - "1389008 Examples seen. Accuracy:0.9522 Error: 0.13965 Loss:0.10517 Threads: 8 Forward time: 4.90s Backward time: 3.23s Step time: 3.65s\n", - "1389648 Examples seen. Accuracy:0.9524 Error: 0.15130 Loss:0.26410 Threads: 8 Forward time: 4.90s Backward time: 3.23s Step time: 3.60s\n", - "1390288 Examples seen. Accuracy:0.9521 Error: 0.07675 Loss:0.05048 Threads: 8 Forward time: 4.90s Backward time: 3.23s Step time: 3.60s\n", - "1390928 Examples seen. Accuracy:0.9518 Error: 0.11075 Loss:0.12370 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.62s\n", - "1391568 Examples seen. Accuracy:0.9516 Error: 0.15065 Loss:0.11448 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.61s\n", - "1392208 Examples seen. Accuracy:0.9522 Error: 0.09014 Loss:0.05841 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.61s\n", - "1392848 Examples seen. 
Accuracy:0.9530 Error: 0.09892 Loss:0.07473 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.66s\n", - "1393488 Examples seen. Accuracy:0.9538 Error: 0.07609 Loss:0.06965 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.63s\n", - "1394128 Examples seen. Accuracy:0.9523 Error: 0.02262 Loss:0.01602 Threads: 8 Forward time: 4.93s Backward time: 3.27s Step time: 3.63s\n", - "1394768 Examples seen. Accuracy:0.9522 Error: 0.14675 Loss:0.15687 Threads: 8 Forward time: 4.97s Backward time: 3.28s Step time: 3.65s\n", - "1395408 Examples seen. Accuracy:0.9526 Error: 0.03480 Loss:0.01913 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.68s\n", - "1396048 Examples seen. Accuracy:0.9529 Error: 0.08270 Loss:0.09780 Threads: 8 Forward time: 4.91s Backward time: 3.25s Step time: 3.61s\n", - "1396688 Examples seen. Accuracy:0.9527 Error: 0.19413 Loss:0.28563 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.63s\n", - "Starting Validation.\n", - "Epochs: 28 Examples seen:1397312 Validation Accuracy: 0.9826 Validation Error: 0.0543 Validation Loss: 0.0600 Total time: 152.85min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 28. Working time: 2.55 hours.\n", - "1397952 Examples seen. Accuracy:0.9520 Error: 0.14071 Loss:0.11459 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.64s\n", - "1398592 Examples seen. Accuracy:0.9489 Error: 0.18503 Loss:0.28415 Threads: 8 Forward time: 4.92s Backward time: 3.22s Step time: 3.66s\n", - "1399232 Examples seen. Accuracy:0.9494 Error: 0.10677 Loss:0.11210 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.65s\n", - "1399872 Examples seen. Accuracy:0.9483 Error: 0.14365 Loss:0.11966 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.64s\n", - "1400512 Examples seen. Accuracy:0.9480 Error: 0.11535 Loss:0.11886 Threads: 8 Forward time: 4.95s Backward time: 3.19s Step time: 3.64s\n", - "1401152 Examples seen. 
Accuracy:0.9464 Error: 0.20490 Loss:0.33197 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.64s\n", - "1401792 Examples seen. Accuracy:0.9462 Error: 0.13872 Loss:0.09682 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.66s\n", - "1402432 Examples seen. Accuracy:0.9467 Error: 0.19441 Loss:0.22473 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.68s\n", - "1403072 Examples seen. Accuracy:0.9482 Error: 0.07202 Loss:0.07695 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.74s\n", - "1403712 Examples seen. Accuracy:0.9488 Error: 0.10965 Loss:0.12676 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.63s\n", - "1404352 Examples seen. Accuracy:0.9493 Error: 0.14986 Loss:0.12941 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.65s\n", - "1404992 Examples seen. Accuracy:0.9482 Error: 0.13769 Loss:0.13061 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.69s\n", - "1405632 Examples seen. Accuracy:0.9493 Error: 0.19323 Loss:0.18651 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.71s\n", - "1406272 Examples seen. Accuracy:0.9489 Error: 0.09498 Loss:0.16309 Threads: 8 Forward time: 4.90s Backward time: 3.23s Step time: 3.61s\n", - "1406912 Examples seen. Accuracy:0.9487 Error: 0.13741 Loss:0.10482 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.59s\n", - "1407552 Examples seen. Accuracy:0.9484 Error: 0.07133 Loss:0.04875 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.59s\n", - "1408192 Examples seen. Accuracy:0.9475 Error: 0.17482 Loss:0.14570 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.59s\n", - "1408832 Examples seen. Accuracy:0.9472 Error: 0.10073 Loss:0.06810 Threads: 8 Forward time: 4.94s Backward time: 3.19s Step time: 3.60s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1409472 Examples seen. 
Accuracy:0.9469 Error: 0.08732 Loss:0.06722 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.61s\n", - "1410112 Examples seen. Accuracy:0.9463 Error: 0.18115 Loss:0.15862 Threads: 8 Forward time: 4.96s Backward time: 3.28s Step time: 3.62s\n", - "1410752 Examples seen. Accuracy:0.9454 Error: 0.20933 Loss:0.25905 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.61s\n", - "1411392 Examples seen. Accuracy:0.9454 Error: 0.05363 Loss:0.05363 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.61s\n", - "1412032 Examples seen. Accuracy:0.9451 Error: 0.11578 Loss:0.12851 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.61s\n", - "1412672 Examples seen. Accuracy:0.9455 Error: 0.10642 Loss:0.07930 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.62s\n", - "1413312 Examples seen. Accuracy:0.9467 Error: 0.10118 Loss:0.11019 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.64s\n", - "1413952 Examples seen. Accuracy:0.9474 Error: 0.14785 Loss:0.11550 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.63s\n", - "1414592 Examples seen. Accuracy:0.9470 Error: 0.18992 Loss:0.22856 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.63s\n", - "1415232 Examples seen. Accuracy:0.9477 Error: 0.15307 Loss:0.27508 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.67s\n", - "1415872 Examples seen. Accuracy:0.9472 Error: 0.07310 Loss:0.06484 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.65s\n", - "1416512 Examples seen. Accuracy:0.9468 Error: 0.12746 Loss:0.12694 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.68s\n", - "1417152 Examples seen. Accuracy:0.9467 Error: 0.11046 Loss:0.12320 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.62s\n", - "1417792 Examples seen. 
Accuracy:0.9461 Error: 0.18200 Loss:0.19451 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.62s\n", - "1418432 Examples seen. Accuracy:0.9461 Error: 0.20302 Loss:0.19527 Threads: 8 Forward time: 5.08s Backward time: 3.21s Step time: 3.64s\n", - "1419072 Examples seen. Accuracy:0.9456 Error: 0.18755 Loss:0.27303 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.62s\n", - "1419712 Examples seen. Accuracy:0.9456 Error: 0.14689 Loss:0.13639 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.62s\n", - "1420352 Examples seen. Accuracy:0.9473 Error: 0.06867 Loss:0.04247 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.63s\n", - "1420992 Examples seen. Accuracy:0.9483 Error: 0.07056 Loss:0.04423 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.62s\n", - "1421632 Examples seen. Accuracy:0.9484 Error: 0.05687 Loss:0.03613 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.61s\n", - "1422272 Examples seen. Accuracy:0.9478 Error: 0.08224 Loss:0.10699 Threads: 8 Forward time: 4.92s Backward time: 3.22s Step time: 3.62s\n", - "1422912 Examples seen. Accuracy:0.9478 Error: 0.10988 Loss:0.08428 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.62s\n", - "1423552 Examples seen. Accuracy:0.9488 Error: 0.15950 Loss:0.22997 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.61s\n", - "1424192 Examples seen. Accuracy:0.9469 Error: 0.13396 Loss:0.12097 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.68s\n", - "1424832 Examples seen. Accuracy:0.9474 Error: 0.08122 Loss:0.06638 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.68s\n", - "1425472 Examples seen. Accuracy:0.9474 Error: 0.14775 Loss:0.12325 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.69s\n", - "1426112 Examples seen. 
Accuracy:0.9481 Error: 0.10931 Loss:0.12067 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.68s\n", - "1426752 Examples seen. Accuracy:0.9485 Error: 0.25497 Loss:0.54245 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 4.25s\n", - "1427392 Examples seen. Accuracy:0.9483 Error: 0.09018 Loss:0.08529 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.61s\n", - "1428032 Examples seen. Accuracy:0.9482 Error: 0.14518 Loss:0.09340 Threads: 8 Forward time: 4.88s Backward time: 3.23s Step time: 3.59s\n", - "1428672 Examples seen. Accuracy:0.9478 Error: 0.12913 Loss:0.19537 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.62s\n", - "1429312 Examples seen. Accuracy:0.9484 Error: 0.11393 Loss:0.09061 Threads: 8 Forward time: 4.90s Backward time: 3.24s Step time: 3.60s\n", - "1429952 Examples seen. Accuracy:0.9480 Error: 0.16260 Loss:0.14750 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.62s\n", - "1430592 Examples seen. Accuracy:0.9479 Error: 0.12916 Loss:0.17194 Threads: 8 Forward time: 4.91s Backward time: 3.28s Step time: 3.63s\n", - "1431232 Examples seen. Accuracy:0.9469 Error: 0.12566 Loss:0.18932 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.61s\n", - "1431872 Examples seen. Accuracy:0.9471 Error: 0.27435 Loss:0.37185 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.60s\n", - "1432512 Examples seen. Accuracy:0.9462 Error: 0.11519 Loss:0.09288 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.61s\n", - "1433152 Examples seen. Accuracy:0.9463 Error: 0.07026 Loss:0.05649 Threads: 8 Forward time: 4.90s Backward time: 3.25s Step time: 3.61s\n", - "1433792 Examples seen. Accuracy:0.9472 Error: 0.09960 Loss:0.06100 Threads: 8 Forward time: 4.90s Backward time: 3.25s Step time: 3.60s\n", - "1434432 Examples seen. 
Accuracy:0.9460 Error: 0.14787 Loss:0.23870 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.62s\n", - "1435072 Examples seen. Accuracy:0.9454 Error: 0.06363 Loss:0.07042 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.69s\n", - "1435712 Examples seen. Accuracy:0.9448 Error: 0.15126 Loss:0.17529 Threads: 8 Forward time: 4.93s Backward time: 3.25s Step time: 3.65s\n", - "1436352 Examples seen. Accuracy:0.9444 Error: 0.25060 Loss:0.33553 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.71s\n", - "1436992 Examples seen. Accuracy:0.9443 Error: 0.13863 Loss:0.15685 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.70s\n", - "1437632 Examples seen. Accuracy:0.9451 Error: 0.08611 Loss:0.05561 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.73s\n", - "1438272 Examples seen. Accuracy:0.9462 Error: 0.18388 Loss:0.13938 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.68s\n", - "1438912 Examples seen. Accuracy:0.9458 Error: 0.08707 Loss:0.06957 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.69s\n", - "1439552 Examples seen. Accuracy:0.9445 Error: 0.13905 Loss:0.15246 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.66s\n", - "1440192 Examples seen. Accuracy:0.9454 Error: 0.16139 Loss:0.13547 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.67s\n", - "1440832 Examples seen. Accuracy:0.9453 Error: 0.12961 Loss:0.18257 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.66s\n", - "1441472 Examples seen. Accuracy:0.9463 Error: 0.14758 Loss:0.16493 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.66s\n", - "1442112 Examples seen. Accuracy:0.9462 Error: 0.21257 Loss:0.17336 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.66s\n", - "1442752 Examples seen. 
Accuracy:0.9457 Error: 0.10177 Loss:0.06311 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.69s\n", - "1443392 Examples seen. Accuracy:0.9446 Error: 0.22161 Loss:0.39676 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.69s\n", - "1444032 Examples seen. Accuracy:0.9436 Error: 0.07667 Loss:0.04998 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.68s\n", - "1444672 Examples seen. Accuracy:0.9416 Error: 0.20379 Loss:0.29059 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.67s\n", - "1445312 Examples seen. Accuracy:0.9425 Error: 0.14832 Loss:0.13608 Threads: 8 Forward time: 4.93s Backward time: 3.21s Step time: 3.67s\n", - "1445952 Examples seen. Accuracy:0.9431 Error: 0.22307 Loss:0.21920 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.65s\n", - "1446592 Examples seen. Accuracy:0.9458 Error: 0.09392 Loss:0.06083 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.66s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Starting Validation.\n", - "Epochs: 29 Examples seen:1447216 Validation Accuracy: 0.9815 Validation Error: 0.0539 Validation Loss: 0.0588 Total time: 158.07min\n", - "Epoch time: 4.8 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 29. Working time: 2.63 hours.\n", - "1447856 Examples seen. Accuracy:0.9464 Error: 0.10081 Loss:0.12748 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.67s\n", - "1448496 Examples seen. Accuracy:0.9473 Error: 0.10701 Loss:0.08665 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.70s\n", - "1449136 Examples seen. Accuracy:0.9456 Error: 0.10133 Loss:0.12105 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.69s\n", - "1449776 Examples seen. Accuracy:0.9455 Error: 0.06103 Loss:0.04085 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.71s\n", - "1450416 Examples seen. 
Accuracy:0.9461 Error: 0.11119 Loss:0.08990 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.72s\n", - "1451056 Examples seen. Accuracy:0.9468 Error: 0.24257 Loss:0.30252 Threads: 8 Forward time: 5.13s Backward time: 3.30s Step time: 4.23s\n", - "1451696 Examples seen. Accuracy:0.9482 Error: 0.15170 Loss:0.12253 Threads: 8 Forward time: 5.17s Backward time: 3.30s Step time: 3.84s\n", - "1452336 Examples seen. Accuracy:0.9468 Error: 0.16842 Loss:0.19528 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 4.16s\n", - "1452976 Examples seen. Accuracy:0.9476 Error: 0.04499 Loss:0.04408 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.64s\n", - "1453616 Examples seen. Accuracy:0.9477 Error: 0.16040 Loss:0.22434 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.59s\n", - "1454256 Examples seen. Accuracy:0.9474 Error: 0.06507 Loss:0.04274 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.61s\n", - "1454896 Examples seen. Accuracy:0.9471 Error: 0.12018 Loss:0.08207 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.61s\n", - "1455536 Examples seen. Accuracy:0.9457 Error: 0.12105 Loss:0.13171 Threads: 8 Forward time: 5.07s Backward time: 3.25s Step time: 3.66s\n", - "1456176 Examples seen. Accuracy:0.9463 Error: 0.18114 Loss:0.15306 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.67s\n", - "1456816 Examples seen. Accuracy:0.9472 Error: 0.06513 Loss:0.03691 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.64s\n", - "1457456 Examples seen. Accuracy:0.9475 Error: 0.18867 Loss:0.19671 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.65s\n", - "1458096 Examples seen. Accuracy:0.9487 Error: 0.12882 Loss:0.13529 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.63s\n", - "1458736 Examples seen. 
Accuracy:0.9480 Error: 0.14104 Loss:0.21133 Threads: 8 Forward time: 5.05s Backward time: 3.21s Step time: 3.64s\n", - "1459376 Examples seen. Accuracy:0.9478 Error: 0.12597 Loss:0.11772 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.63s\n", - "1460016 Examples seen. Accuracy:0.9481 Error: 0.17798 Loss:0.20918 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.65s\n", - "1460656 Examples seen. Accuracy:0.9491 Error: 0.05437 Loss:0.04056 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.65s\n", - "1461296 Examples seen. Accuracy:0.9503 Error: 0.09656 Loss:0.15096 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.72s\n", - "1461936 Examples seen. Accuracy:0.9510 Error: 0.05769 Loss:0.04776 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.64s\n", - "1462576 Examples seen. Accuracy:0.9500 Error: 0.14814 Loss:0.16030 Threads: 8 Forward time: 5.16s Backward time: 3.29s Step time: 3.72s\n", - "1463216 Examples seen. Accuracy:0.9505 Error: 0.06782 Loss:0.06567 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.62s\n", - "1463856 Examples seen. Accuracy:0.9514 Error: 0.18103 Loss:0.18898 Threads: 8 Forward time: 4.92s Backward time: 3.25s Step time: 3.68s\n", - "1464496 Examples seen. Accuracy:0.9517 Error: 0.17383 Loss:0.19871 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.65s\n", - "1465136 Examples seen. Accuracy:0.9509 Error: 0.13407 Loss:0.11429 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.64s\n", - "1465776 Examples seen. Accuracy:0.9512 Error: 0.14041 Loss:0.14374 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.64s\n", - "1466416 Examples seen. Accuracy:0.9507 Error: 0.09041 Loss:0.08903 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.61s\n", - "1467056 Examples seen. 
Accuracy:0.9504 Error: 0.13551 Loss:0.10703 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.63s\n", - "1467696 Examples seen. Accuracy:0.9486 Error: 0.10316 Loss:0.09404 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.66s\n", - "1468336 Examples seen. Accuracy:0.9488 Error: 0.12375 Loss:0.10232 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.76s\n", - "1468976 Examples seen. Accuracy:0.9496 Error: 0.15481 Loss:0.16754 Threads: 8 Forward time: 5.11s Backward time: 3.27s Step time: 3.69s\n", - "1469616 Examples seen. Accuracy:0.9485 Error: 0.17036 Loss:0.18838 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.67s\n", - "1470256 Examples seen. Accuracy:0.9483 Error: 0.09654 Loss:0.10328 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.66s\n", - "1470896 Examples seen. Accuracy:0.9483 Error: 0.04706 Loss:0.02561 Threads: 8 Forward time: 5.05s Backward time: 3.30s Step time: 3.72s\n", - "1471536 Examples seen. Accuracy:0.9489 Error: 0.08310 Loss:0.05426 Threads: 8 Forward time: 5.06s Backward time: 3.28s Step time: 3.69s\n", - "1472176 Examples seen. Accuracy:0.9496 Error: 0.07609 Loss:0.04889 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.76s\n", - "1472816 Examples seen. Accuracy:0.9498 Error: 0.14653 Loss:0.18602 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.79s\n", - "1473456 Examples seen. Accuracy:0.9501 Error: 0.09466 Loss:0.06107 Threads: 8 Forward time: 5.03s Backward time: 3.21s Step time: 3.73s\n", - "1474096 Examples seen. Accuracy:0.9496 Error: 0.12114 Loss:0.12567 Threads: 8 Forward time: 5.09s Backward time: 3.25s Step time: 3.87s\n", - "1474736 Examples seen. Accuracy:0.9500 Error: 0.08347 Loss:0.05218 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.76s\n", - "1475376 Examples seen. 
Accuracy:0.9511 Error: 0.05955 Loss:0.04213 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.69s\n", - "1476016 Examples seen. Accuracy:0.9489 Error: 0.20382 Loss:0.19270 Threads: 8 Forward time: 5.06s Backward time: 3.24s Step time: 3.72s\n", - "1476656 Examples seen. Accuracy:0.9478 Error: 0.15078 Loss:0.14858 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.69s\n", - "1477296 Examples seen. Accuracy:0.9471 Error: 0.08690 Loss:0.08119 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.68s\n", - "1477936 Examples seen. Accuracy:0.9481 Error: 0.09349 Loss:0.10810 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.70s\n", - "1478576 Examples seen. Accuracy:0.9484 Error: 0.18310 Loss:0.22332 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.69s\n", - "1479216 Examples seen. Accuracy:0.9488 Error: 0.16354 Loss:0.16635 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.67s\n", - "1479856 Examples seen. Accuracy:0.9489 Error: 0.13712 Loss:0.15687 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.71s\n", - "1480496 Examples seen. Accuracy:0.9490 Error: 0.16396 Loss:0.14134 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.71s\n", - "1481136 Examples seen. Accuracy:0.9486 Error: 0.12897 Loss:0.09610 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.67s\n", - "1481776 Examples seen. Accuracy:0.9491 Error: 0.09852 Loss:0.17474 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.72s\n", - "1482416 Examples seen. Accuracy:0.9493 Error: 0.18206 Loss:0.18885 Threads: 8 Forward time: 4.95s Backward time: 3.17s Step time: 3.69s\n", - "1483056 Examples seen. Accuracy:0.9497 Error: 0.10386 Loss:0.08507 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.68s\n", - "1483696 Examples seen. 
Accuracy:0.9498 Error: 0.15285 Loss:0.11083 Threads: 8 Forward time: 5.03s Backward time: 3.20s Step time: 3.66s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1484336 Examples seen. Accuracy:0.9496 Error: 0.15772 Loss:0.18276 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.69s\n", - "1484976 Examples seen. Accuracy:0.9484 Error: 0.13115 Loss:0.14634 Threads: 8 Forward time: 5.07s Backward time: 3.23s Step time: 3.72s\n", - "1485616 Examples seen. Accuracy:0.9497 Error: 0.10859 Loss:0.09835 Threads: 8 Forward time: 4.97s Backward time: 3.18s Step time: 3.69s\n", - "1486256 Examples seen. Accuracy:0.9500 Error: 0.09859 Loss:0.14146 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.65s\n", - "1486896 Examples seen. Accuracy:0.9494 Error: 0.14755 Loss:0.17385 Threads: 8 Forward time: 5.09s Backward time: 3.26s Step time: 3.70s\n", - "1487536 Examples seen. Accuracy:0.9506 Error: 0.07704 Loss:0.06560 Threads: 8 Forward time: 4.99s Backward time: 3.19s Step time: 3.72s\n", - "1488176 Examples seen. Accuracy:0.9512 Error: 0.14551 Loss:0.16851 Threads: 8 Forward time: 5.08s Backward time: 3.26s Step time: 3.70s\n", - "1488816 Examples seen. Accuracy:0.9512 Error: 0.14966 Loss:0.16631 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.71s\n", - "1489456 Examples seen. Accuracy:0.9504 Error: 0.16662 Loss:0.22732 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.69s\n", - "1490096 Examples seen. Accuracy:0.9502 Error: 0.12427 Loss:0.12262 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.71s\n", - "1490736 Examples seen. Accuracy:0.9507 Error: 0.06290 Loss:0.03997 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.70s\n", - "1491376 Examples seen. Accuracy:0.9486 Error: 0.16768 Loss:0.19408 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.71s\n", - "1492016 Examples seen. 
Accuracy:0.9496 Error: 0.13291 Loss:0.09654 Threads: 8 Forward time: 5.10s Backward time: 3.26s Step time: 3.74s\n", - "1492656 Examples seen. Accuracy:0.9496 Error: 0.11797 Loss:0.15322 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.69s\n", - "1493296 Examples seen. Accuracy:0.9500 Error: 0.10390 Loss:0.12544 Threads: 8 Forward time: 5.05s Backward time: 3.26s Step time: 3.72s\n", - "1493936 Examples seen. Accuracy:0.9510 Error: 0.13868 Loss:0.15337 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.76s\n", - "1494576 Examples seen. Accuracy:0.9507 Error: 0.13635 Loss:0.13890 Threads: 8 Forward time: 5.11s Backward time: 3.31s Step time: 3.88s\n", - "1495216 Examples seen. Accuracy:0.9507 Error: 0.22470 Loss:0.30595 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.81s\n", - "1495856 Examples seen. Accuracy:0.9505 Error: 0.14878 Loss:0.16159 Threads: 8 Forward time: 5.05s Backward time: 3.27s Step time: 3.78s\n", - "1496496 Examples seen. Accuracy:0.9508 Error: 0.26502 Loss:0.46716 Threads: 8 Forward time: 5.10s Backward time: 3.29s Step time: 3.77s\n", - "Starting Validation.\n", - "Epochs: 30 Examples seen:1497120 Validation Accuracy: 0.9819 Validation Error: 0.0523 Validation Loss: 0.0569 Total time: 163.36min\n", - "Starting Testing.\n", - "Epochs: 30 Examples seen:1497120 Test Accuracy: 0.9859 Test Error: 0.0456 Test Loss: 0.0411 Total time: 163.83min\n", - "Epoch time: 4.9 minutes. 100 epochs: 8.2 hours.\n", - "Epochs: 30. Working time: 2.73 hours.\n", - "Learning rate set to:0.00074\n", - "1497760 Examples seen. Accuracy:0.9508 Error: 0.14861 Loss:0.14557 Threads: 8 Forward time: 5.05s Backward time: 3.26s Step time: 3.90s\n", - "1498400 Examples seen. Accuracy:0.9519 Error: 0.17429 Loss:0.13326 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.66s\n", - "1499040 Examples seen. 
Accuracy:0.9517 Error: 0.17726 Loss:0.22423 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.75s\n", - "1499680 Examples seen. Accuracy:0.9513 Error: 0.06564 Loss:0.03994 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.70s\n", - "1500320 Examples seen. Accuracy:0.9519 Error: 0.11300 Loss:0.19092 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.69s\n", - "1500960 Examples seen. Accuracy:0.9528 Error: 0.06666 Loss:0.04443 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.71s\n", - "1501600 Examples seen. Accuracy:0.9532 Error: 0.14064 Loss:0.11202 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.71s\n", - "1502240 Examples seen. Accuracy:0.9532 Error: 0.10233 Loss:0.06801 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.73s\n", - "1502880 Examples seen. Accuracy:0.9534 Error: 0.14872 Loss:0.10102 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.72s\n", - "1503520 Examples seen. Accuracy:0.9539 Error: 0.21124 Loss:0.21142 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.73s\n", - "1504160 Examples seen. Accuracy:0.9539 Error: 0.21600 Loss:0.23482 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.76s\n", - "1504800 Examples seen. Accuracy:0.9539 Error: 0.12996 Loss:0.11105 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.70s\n", - "1505440 Examples seen. Accuracy:0.9532 Error: 0.13709 Loss:0.13116 Threads: 8 Forward time: 5.07s Backward time: 3.26s Step time: 3.70s\n", - "1506080 Examples seen. Accuracy:0.9527 Error: 0.20072 Loss:0.24342 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.75s\n", - "1506720 Examples seen. Accuracy:0.9532 Error: 0.20407 Loss:0.28499 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.69s\n", - "1507360 Examples seen. 
Accuracy:0.9532 Error: 0.05877 Loss:0.03551 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.71s\n", - "1508000 Examples seen. Accuracy:0.9528 Error: 0.05974 Loss:0.05044 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.75s\n", - "1508640 Examples seen. Accuracy:0.9530 Error: 0.14688 Loss:0.15558 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.71s\n", - "1509280 Examples seen. Accuracy:0.9522 Error: 0.13597 Loss:0.19620 Threads: 8 Forward time: 4.92s Backward time: 3.20s Step time: 3.70s\n", - "1509920 Examples seen. Accuracy:0.9523 Error: 0.09955 Loss:0.14736 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.70s\n", - "1510560 Examples seen. Accuracy:0.9510 Error: 0.22952 Loss:0.26378 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.77s\n", - "1511200 Examples seen. Accuracy:0.9508 Error: 0.17019 Loss:0.19236 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.68s\n", - "1511840 Examples seen. Accuracy:0.9501 Error: 0.08093 Loss:0.05472 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.68s\n", - "1512480 Examples seen. Accuracy:0.9481 Error: 0.11854 Loss:0.22845 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.77s\n", - "1513120 Examples seen. Accuracy:0.9488 Error: 0.08967 Loss:0.08184 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.70s\n", - "1513760 Examples seen. Accuracy:0.9513 Error: 0.10437 Loss:0.12372 Threads: 8 Forward time: 5.15s Backward time: 3.30s Step time: 3.73s\n", - "1514400 Examples seen. Accuracy:0.9516 Error: 0.08894 Loss:0.15115 Threads: 8 Forward time: 5.07s Backward time: 3.26s Step time: 3.77s\n", - "1515040 Examples seen. Accuracy:0.9506 Error: 0.22076 Loss:0.21993 Threads: 8 Forward time: 5.08s Backward time: 3.30s Step time: 3.73s\n", - "1515680 Examples seen. 
Accuracy:0.9501 Error: 0.12816 Loss:0.14776 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.72s\n", - "1516320 Examples seen. Accuracy:0.9498 Error: 0.18388 Loss:0.21836 Threads: 8 Forward time: 5.10s Backward time: 3.28s Step time: 3.98s\n", - "1516960 Examples seen. Accuracy:0.9495 Error: 0.12942 Loss:0.08564 Threads: 8 Forward time: 5.13s Backward time: 3.33s Step time: 3.74s\n", - "1517600 Examples seen. Accuracy:0.9483 Error: 0.14637 Loss:0.09820 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.75s\n", - "1518240 Examples seen. Accuracy:0.9492 Error: 0.11324 Loss:0.15473 Threads: 8 Forward time: 5.10s Backward time: 3.29s Step time: 3.76s\n", - "1518880 Examples seen. Accuracy:0.9509 Error: 0.08589 Loss:0.07891 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.73s\n", - "1519520 Examples seen. Accuracy:0.9502 Error: 0.12827 Loss:0.08967 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.66s\n", - "1520160 Examples seen. Accuracy:0.9501 Error: 0.12325 Loss:0.13687 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.67s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1520800 Examples seen. Accuracy:0.9503 Error: 0.18560 Loss:0.21073 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.66s\n", - "1521440 Examples seen. Accuracy:0.9515 Error: 0.13357 Loss:0.11113 Threads: 8 Forward time: 5.03s Backward time: 3.28s Step time: 3.67s\n", - "1522080 Examples seen. Accuracy:0.9521 Error: 0.02460 Loss:0.01391 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.66s\n", - "1522720 Examples seen. Accuracy:0.9517 Error: 0.10466 Loss:0.11742 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.66s\n", - "1523360 Examples seen. Accuracy:0.9524 Error: 0.08335 Loss:0.08736 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.68s\n", - "1524000 Examples seen. 
Accuracy:0.9529 Error: 0.10179 Loss:0.08754 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.65s\n", - "1524640 Examples seen. Accuracy:0.9543 Error: 0.08259 Loss:0.05290 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.66s\n", - "1525280 Examples seen. Accuracy:0.9526 Error: 0.06198 Loss:0.04723 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.67s\n", - "1525920 Examples seen. Accuracy:0.9516 Error: 0.12397 Loss:0.10570 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.66s\n", - "1526560 Examples seen. Accuracy:0.9523 Error: 0.13771 Loss:0.12439 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.70s\n", - "1527200 Examples seen. Accuracy:0.9510 Error: 0.15107 Loss:0.12061 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.68s\n", - "1527840 Examples seen. Accuracy:0.9519 Error: 0.04796 Loss:0.03480 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.67s\n", - "1528480 Examples seen. Accuracy:0.9524 Error: 0.10863 Loss:0.09759 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.68s\n", - "1529120 Examples seen. Accuracy:0.9521 Error: 0.12279 Loss:0.10436 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.64s\n", - "1529760 Examples seen. Accuracy:0.9515 Error: 0.10621 Loss:0.13907 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.65s\n", - "1530400 Examples seen. Accuracy:0.9509 Error: 0.03789 Loss:0.02090 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.67s\n", - "1531040 Examples seen. Accuracy:0.9507 Error: 0.07933 Loss:0.10439 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.67s\n", - "1531680 Examples seen. Accuracy:0.9496 Error: 0.06434 Loss:0.04204 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.68s\n", - "1532320 Examples seen. 
Accuracy:0.9524 Error: 0.04289 Loss:0.02351 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.66s\n", - "1532960 Examples seen. Accuracy:0.9546 Error: 0.12121 Loss:0.14004 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.72s\n", - "1533600 Examples seen. Accuracy:0.9558 Error: 0.07601 Loss:0.04922 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.71s\n", - "1534240 Examples seen. Accuracy:0.9565 Error: 0.06622 Loss:0.06335 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.69s\n", - "1534880 Examples seen. Accuracy:0.9551 Error: 0.18031 Loss:0.20529 Threads: 8 Forward time: 5.03s Backward time: 3.29s Step time: 3.70s\n", - "1535520 Examples seen. Accuracy:0.9551 Error: 0.10154 Loss:0.08534 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.70s\n", - "1536160 Examples seen. Accuracy:0.9545 Error: 0.11521 Loss:0.11783 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.67s\n", - "1536800 Examples seen. Accuracy:0.9544 Error: 0.15828 Loss:0.15493 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.67s\n", - "1537440 Examples seen. Accuracy:0.9545 Error: 0.06720 Loss:0.03915 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.73s\n", - "1538080 Examples seen. Accuracy:0.9545 Error: 0.09375 Loss:0.14285 Threads: 8 Forward time: 5.07s Backward time: 3.22s Step time: 3.68s\n", - "1538720 Examples seen. Accuracy:0.9549 Error: 0.12214 Loss:0.12520 Threads: 8 Forward time: 5.04s Backward time: 3.29s Step time: 3.69s\n", - "1539360 Examples seen. Accuracy:0.9549 Error: 0.20448 Loss:0.19033 Threads: 8 Forward time: 4.93s Backward time: 3.28s Step time: 3.68s\n", - "1540000 Examples seen. Accuracy:0.9537 Error: 0.10431 Loss:0.07206 Threads: 8 Forward time: 5.04s Backward time: 3.27s Step time: 3.69s\n", - "1540640 Examples seen. 
Accuracy:0.9541 Error: 0.07141 Loss:0.04654 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.67s\n", - "1541280 Examples seen. Accuracy:0.9545 Error: 0.07900 Loss:0.06020 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.67s\n", - "1541920 Examples seen. Accuracy:0.9537 Error: 0.17797 Loss:0.23254 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.67s\n", - "1542560 Examples seen. Accuracy:0.9538 Error: 0.08829 Loss:0.07143 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.68s\n", - "1543200 Examples seen. Accuracy:0.9539 Error: 0.12470 Loss:0.10698 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.68s\n", - "1543840 Examples seen. Accuracy:0.9535 Error: 0.11565 Loss:0.08203 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.65s\n", - "1544480 Examples seen. Accuracy:0.9529 Error: 0.07867 Loss:0.06331 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.65s\n", - "1545120 Examples seen. Accuracy:0.9533 Error: 0.11913 Loss:0.09546 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.72s\n", - "1545760 Examples seen. Accuracy:0.9531 Error: 0.12726 Loss:0.43528 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.65s\n", - "1546400 Examples seen. Accuracy:0.9528 Error: 0.05801 Loss:0.03907 Threads: 8 Forward time: 5.09s Backward time: 3.29s Step time: 3.68s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 31 Examples seen:1547024 Validation Accuracy: 0.9830 Validation Error: 0.0514 Validation Loss: 0.0551 Total time: 169.13min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 31. Working time: 2.82 hours.\n", - "1547664 Examples seen. Accuracy:0.9529 Error: 0.16628 Loss:0.18915 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.71s\n", - "1548304 Examples seen. 
Accuracy:0.9530 Error: 0.12248 Loss:0.13709 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.69s\n", - "1548944 Examples seen. Accuracy:0.9530 Error: 0.16205 Loss:0.16951 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.70s\n", - "1549584 Examples seen. Accuracy:0.9531 Error: 0.05847 Loss:0.05447 Threads: 8 Forward time: 5.10s Backward time: 3.34s Step time: 3.68s\n", - "1550224 Examples seen. Accuracy:0.9545 Error: 0.14723 Loss:0.21584 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.79s\n", - "1550864 Examples seen. Accuracy:0.9533 Error: 0.14106 Loss:0.11526 Threads: 8 Forward time: 5.07s Backward time: 3.22s Step time: 3.76s\n", - "1551504 Examples seen. Accuracy:0.9538 Error: 0.04955 Loss:0.03242 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.76s\n", - "1552144 Examples seen. Accuracy:0.9538 Error: 0.20006 Loss:0.23725 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.75s\n", - "1552784 Examples seen. Accuracy:0.9545 Error: 0.15203 Loss:0.12704 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.73s\n", - "1553424 Examples seen. Accuracy:0.9537 Error: 0.31141 Loss:0.32836 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.69s\n", - "1554064 Examples seen. Accuracy:0.9525 Error: 0.14953 Loss:0.17753 Threads: 8 Forward time: 5.24s Backward time: 3.27s Step time: 3.89s\n", - "1554704 Examples seen. Accuracy:0.9535 Error: 0.15743 Loss:0.18140 Threads: 8 Forward time: 5.13s Backward time: 3.32s Step time: 3.91s\n", - "1555344 Examples seen. Accuracy:0.9535 Error: 0.07978 Loss:0.08508 Threads: 8 Forward time: 5.13s Backward time: 3.32s Step time: 3.87s\n", - "1555984 Examples seen. Accuracy:0.9546 Error: 0.15380 Loss:0.19071 Threads: 8 Forward time: 5.08s Backward time: 3.28s Step time: 3.87s\n", - "1556624 Examples seen. 
Accuracy:0.9562 Error: 0.07796 Loss:0.04686 Threads: 8 Forward time: 5.05s Backward time: 3.26s Step time: 3.81s\n", - "1557264 Examples seen. Accuracy:0.9550 Error: 0.22887 Loss:0.26361 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.72s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1557904 Examples seen. Accuracy:0.9558 Error: 0.08477 Loss:0.05844 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.73s\n", - "1558544 Examples seen. Accuracy:0.9539 Error: 0.15311 Loss:0.13919 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.67s\n", - "1559184 Examples seen. Accuracy:0.9539 Error: 0.14976 Loss:0.20154 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.69s\n", - "1559824 Examples seen. Accuracy:0.9547 Error: 0.05121 Loss:0.02976 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.74s\n", - "1560464 Examples seen. Accuracy:0.9533 Error: 0.17158 Loss:0.19611 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.70s\n", - "1561104 Examples seen. Accuracy:0.9535 Error: 0.12366 Loss:0.11041 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.69s\n", - "1561744 Examples seen. Accuracy:0.9541 Error: 0.09472 Loss:0.16228 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.69s\n", - "1562384 Examples seen. Accuracy:0.9537 Error: 0.13272 Loss:0.12658 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "1563024 Examples seen. Accuracy:0.9532 Error: 0.13142 Loss:0.14904 Threads: 8 Forward time: 5.05s Backward time: 3.24s Step time: 3.81s\n", - "1563664 Examples seen. Accuracy:0.9525 Error: 0.16918 Loss:0.21944 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.74s\n", - "1564304 Examples seen. Accuracy:0.9522 Error: 0.04426 Loss:0.05508 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.68s\n", - "1564944 Examples seen. 
Accuracy:0.9526 Error: 0.05966 Loss:0.04083 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.69s\n", - "1565584 Examples seen. Accuracy:0.9524 Error: 0.07746 Loss:0.06974 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.73s\n", - "1566224 Examples seen. Accuracy:0.9521 Error: 0.04751 Loss:0.03113 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.71s\n", - "1566864 Examples seen. Accuracy:0.9516 Error: 0.16781 Loss:0.12833 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.69s\n", - "1567504 Examples seen. Accuracy:0.9524 Error: 0.03014 Loss:0.01646 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.71s\n", - "1568144 Examples seen. Accuracy:0.9523 Error: 0.10557 Loss:0.11976 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.80s\n", - "1568784 Examples seen. Accuracy:0.9531 Error: 0.06069 Loss:0.03711 Threads: 8 Forward time: 5.03s Backward time: 3.29s Step time: 3.81s\n", - "1569424 Examples seen. Accuracy:0.9529 Error: 0.10758 Loss:0.08671 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.73s\n", - "1570064 Examples seen. Accuracy:0.9529 Error: 0.10368 Loss:0.08403 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.68s\n", - "1570704 Examples seen. Accuracy:0.9531 Error: 0.15546 Loss:0.15937 Threads: 8 Forward time: 5.01s Backward time: 3.32s Step time: 3.72s\n", - "1571344 Examples seen. Accuracy:0.9538 Error: 0.12103 Loss:0.12506 Threads: 8 Forward time: 5.02s Backward time: 3.29s Step time: 3.70s\n", - "1571984 Examples seen. Accuracy:0.9540 Error: 0.13243 Loss:0.12731 Threads: 8 Forward time: 5.16s Backward time: 3.35s Step time: 3.76s\n", - "1572624 Examples seen. Accuracy:0.9541 Error: 0.10642 Loss:0.09237 Threads: 8 Forward time: 5.05s Backward time: 3.28s Step time: 3.79s\n", - "1573264 Examples seen. 
Accuracy:0.9533 Error: 0.08424 Loss:0.09995 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.77s\n", - "1573904 Examples seen. Accuracy:0.9542 Error: 0.12324 Loss:0.13014 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.69s\n", - "1574544 Examples seen. Accuracy:0.9547 Error: 0.11231 Loss:0.11648 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.71s\n", - "1575184 Examples seen. Accuracy:0.9549 Error: 0.18973 Loss:0.20791 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.77s\n", - "1575824 Examples seen. Accuracy:0.9553 Error: 0.09004 Loss:0.07261 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.74s\n", - "1576464 Examples seen. Accuracy:0.9550 Error: 0.14157 Loss:0.16398 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.69s\n", - "1577104 Examples seen. Accuracy:0.9543 Error: 0.19825 Loss:0.32202 Threads: 8 Forward time: 5.05s Backward time: 3.20s Step time: 3.82s\n", - "1577744 Examples seen. Accuracy:0.9537 Error: 0.09035 Loss:0.06121 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.71s\n", - "1578384 Examples seen. Accuracy:0.9526 Error: 0.13430 Loss:0.12229 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.70s\n", - "1579024 Examples seen. Accuracy:0.9529 Error: 0.10155 Loss:0.07976 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.70s\n", - "1579664 Examples seen. Accuracy:0.9531 Error: 0.05964 Loss:0.03903 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.70s\n", - "1580304 Examples seen. Accuracy:0.9510 Error: 0.17312 Loss:0.14742 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.68s\n", - "1580944 Examples seen. Accuracy:0.9503 Error: 0.04752 Loss:0.03827 Threads: 8 Forward time: 4.98s Backward time: 3.28s Step time: 3.69s\n", - "1581584 Examples seen. 
Accuracy:0.9517 Error: 0.08759 Loss:0.10440 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.72s\n", - "1582224 Examples seen. Accuracy:0.9515 Error: 0.06674 Loss:0.04134 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.71s\n", - "1582864 Examples seen. Accuracy:0.9513 Error: 0.14279 Loss:0.17607 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.68s\n", - "1583504 Examples seen. Accuracy:0.9532 Error: 0.07118 Loss:0.07629 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.69s\n", - "1584144 Examples seen. Accuracy:0.9526 Error: 0.12284 Loss:0.13673 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.71s\n", - "1584784 Examples seen. Accuracy:0.9528 Error: 0.07510 Loss:0.09906 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.69s\n", - "1585424 Examples seen. Accuracy:0.9531 Error: 0.11727 Loss:0.09386 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.78s\n", - "1586064 Examples seen. Accuracy:0.9526 Error: 0.14606 Loss:0.19256 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.66s\n", - "1586704 Examples seen. Accuracy:0.9529 Error: 0.13011 Loss:0.16303 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.73s\n", - "1587344 Examples seen. Accuracy:0.9533 Error: 0.08130 Loss:0.16172 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.69s\n", - "1587984 Examples seen. Accuracy:0.9549 Error: 0.07929 Loss:0.05760 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.68s\n", - "1588624 Examples seen. Accuracy:0.9532 Error: 0.14834 Loss:0.13651 Threads: 8 Forward time: 4.92s Backward time: 3.20s Step time: 3.63s\n", - "1589264 Examples seen. Accuracy:0.9528 Error: 0.21870 Loss:0.24153 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.65s\n", - "1589904 Examples seen. 
Accuracy:0.9533 Error: 0.09359 Loss:0.07661 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.68s\n", - "1590544 Examples seen. Accuracy:0.9542 Error: 0.06378 Loss:0.04138 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.66s\n", - "1591184 Examples seen. Accuracy:0.9545 Error: 0.12400 Loss:0.16289 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.70s\n", - "1591824 Examples seen. Accuracy:0.9557 Error: 0.06807 Loss:0.05759 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.66s\n", - "1592464 Examples seen. Accuracy:0.9552 Error: 0.10427 Loss:0.17617 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.65s\n", - "1593104 Examples seen. Accuracy:0.9556 Error: 0.15271 Loss:0.13842 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.67s\n", - "1593744 Examples seen. Accuracy:0.9566 Error: 0.18541 Loss:0.22167 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.68s\n", - "1594384 Examples seen. Accuracy:0.9577 Error: 0.06719 Loss:0.06279 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.65s\n", - "1595024 Examples seen. Accuracy:0.9565 Error: 0.07624 Loss:0.05033 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.65s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1595664 Examples seen. Accuracy:0.9558 Error: 0.15134 Loss:0.14120 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.65s\n", - "1596304 Examples seen. 
Accuracy:0.9557 Error: 0.14495 Loss:0.21276 Threads: 8 Forward time: 5.05s Backward time: 3.26s Step time: 3.67s\n", - "Starting Validation.\n", - "Epochs: 32 Examples seen:1596928 Validation Accuracy: 0.9819 Validation Error: 0.0508 Validation Loss: 0.0543 Total time: 174.44min\n", - "Layer 0 Max Output: 1.266 Min Output: -2.000 TNNetInput 128,128,3 Times: 0.00s 0.00s\n", - "Layer 1 Neurons: 64 Max Weight: 0.439 Min Weight: -0.343 Max Output: 5.860 Min Output: -5.674 TNNetConvolutionLinear 66,66,64 Times: 8.59s 0.40s Parent:0\n", - "Layer 2 Max Output: 5.860 Min Output: -3.336 TNNetMaxPool 33,33,64 Times: 3.64s 0.06s Parent:1\n", - "Layer 3 Neurons: 1 Max Weight: 0.625 Min Weight: 0.249 Max Output: 8.984 Min Output: -5.738 TNNetMovingStdNormalization 33,33,64 Times: 0.28s 0.00s Parent:2\n", - "Layer 4 Neurons: 64 Max Weight: 0.383 Min Weight: -0.216 Max Output: 11.040 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.89s 0.51s Parent:3\n", - "Layer 5 Neurons: 64 Max Weight: 0.369 Min Weight: -0.339 Max Output: 10.826 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.91s 0.18s Parent:4\n", - "Layer 6 Max Output: 10.826 Min Output: 0.000 TNNetMaxPool 17,17,64 Times: 0.49s 0.02s Parent:5\n", - "Layer 7 Neurons: 64 Max Weight: 0.410 Min Weight: -0.262 Max Output: 8.117 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.44s 0.09s Parent:6\n", - "Layer 8 Neurons: 64 Max Weight: 0.266 Min Weight: -0.242 Max Output: 6.709 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.49s 0.09s Parent:7\n", - "Layer 9 Neurons: 64 Max Weight: 0.249 Min Weight: -0.203 Max Output: 10.613 Min Output: 0.000 TNNetConvolutionReLU 9,9,64 Times: 0.45s 0.02s Parent:8\n", - "Layer 10 Max Output: 10.613 Min Output: 0.000 TNNetDropout 9,9,64 Times: 0.00s 0.00s Parent:9\n", - "Layer 11 Max Output: 10.613 Min Output: 0.000 TNNetMaxPool 5,5,64 Times: 0.05s 0.00s Parent:10\n", - "Layer 12 Neurons: 39 Max Weight: 0.393 Min Weight: -0.392 Max Output: 30.513 Min Output: -13.762 
TNNetFullConnectLinear 39,1,1 Times: 0.02s 0.00s Parent:11\n", - "Layer 13 Max Output: 1.000 Min Output: 0.000 TNNetSoftMax 39,1,1 Times: 0.00s 0.00s Parent:12\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 32. Working time: 2.91 hours.\n", - "1597568 Examples seen. Accuracy:0.9541 Error: 0.16013 Loss:0.24270 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.77s\n", - "1598208 Examples seen. Accuracy:0.9552 Error: 0.10279 Loss:0.11600 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.69s\n", - "1598848 Examples seen. Accuracy:0.9563 Error: 0.16388 Loss:0.23268 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.68s\n", - "1599488 Examples seen. Accuracy:0.9570 Error: 0.08905 Loss:0.06510 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.73s\n", - "1600128 Examples seen. Accuracy:0.9554 Error: 0.14965 Loss:0.31265 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.74s\n", - "1600768 Examples seen. Accuracy:0.9560 Error: 0.14754 Loss:0.16965 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.73s\n", - "1601408 Examples seen. Accuracy:0.9563 Error: 0.08028 Loss:0.04986 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.64s\n", - "1602048 Examples seen. Accuracy:0.9564 Error: 0.06702 Loss:0.04836 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.64s\n", - "1602688 Examples seen. Accuracy:0.9569 Error: 0.04664 Loss:0.08026 Threads: 8 Forward time: 5.06s Backward time: 3.24s Step time: 3.74s\n", - "1603328 Examples seen. Accuracy:0.9573 Error: 0.08061 Loss:0.09195 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.64s\n", - "1603968 Examples seen. Accuracy:0.9571 Error: 0.12977 Loss:0.13318 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.65s\n", - "1604608 Examples seen. 
Accuracy:0.9557 Error: 0.13459 Loss:0.19731 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.62s\n", - "1605248 Examples seen. Accuracy:0.9555 Error: 0.09090 Loss:0.15428 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.61s\n", - "1605888 Examples seen. Accuracy:0.9548 Error: 0.11312 Loss:0.09419 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.61s\n", - "1606528 Examples seen. Accuracy:0.9543 Error: 0.17129 Loss:0.22943 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.64s\n", - "1607168 Examples seen. Accuracy:0.9538 Error: 0.09884 Loss:0.14281 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.65s\n", - "1607808 Examples seen. Accuracy:0.9542 Error: 0.15143 Loss:0.13002 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.64s\n", - "1608448 Examples seen. Accuracy:0.9546 Error: 0.05632 Loss:0.05603 Threads: 8 Forward time: 4.95s Backward time: 3.26s Step time: 3.65s\n", - "1609088 Examples seen. Accuracy:0.9545 Error: 0.08208 Loss:0.08032 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.69s\n", - "1609728 Examples seen. Accuracy:0.9528 Error: 0.12700 Loss:0.15873 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.65s\n", - "1610368 Examples seen. Accuracy:0.9531 Error: 0.17311 Loss:0.16741 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.67s\n", - "1611008 Examples seen. Accuracy:0.9535 Error: 0.04387 Loss:0.02371 Threads: 8 Forward time: 5.04s Backward time: 3.31s Step time: 3.69s\n", - "1611648 Examples seen. Accuracy:0.9533 Error: 0.12807 Loss:0.15235 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.65s\n", - "1612288 Examples seen. Accuracy:0.9537 Error: 0.12386 Loss:0.11456 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.63s\n", - "1612928 Examples seen. 
Accuracy:0.9538 Error: 0.19187 Loss:0.16314 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.64s\n", - "1613568 Examples seen. Accuracy:0.9507 Error: 0.12188 Loss:0.13847 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.64s\n", - "1614208 Examples seen. Accuracy:0.9507 Error: 0.22041 Loss:0.34930 Threads: 8 Forward time: 4.95s Backward time: 3.27s Step time: 3.63s\n", - "1614848 Examples seen. Accuracy:0.9507 Error: 0.09226 Loss:0.06922 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.63s\n", - "1615488 Examples seen. Accuracy:0.9505 Error: 0.22833 Loss:0.21934 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.64s\n", - "1616128 Examples seen. Accuracy:0.9495 Error: 0.20253 Loss:0.23931 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.62s\n", - "1616768 Examples seen. Accuracy:0.9503 Error: 0.06496 Loss:0.05083 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.62s\n", - "1617408 Examples seen. Accuracy:0.9499 Error: 0.08523 Loss:0.06422 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.63s\n", - "1618048 Examples seen. Accuracy:0.9500 Error: 0.09634 Loss:0.08651 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.65s\n", - "1618688 Examples seen. Accuracy:0.9513 Error: 0.04952 Loss:0.03577 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.65s\n", - "1619328 Examples seen. Accuracy:0.9511 Error: 0.12932 Loss:0.10125 Threads: 8 Forward time: 4.93s Backward time: 3.20s Step time: 3.62s\n", - "1619968 Examples seen. Accuracy:0.9505 Error: 0.22770 Loss:0.25212 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.61s\n", - "1620608 Examples seen. Accuracy:0.9510 Error: 0.12027 Loss:0.08784 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.70s\n", - "1621248 Examples seen. 
Accuracy:0.9511 Error: 0.08571 Loss:0.06502 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.63s\n", - "1621888 Examples seen. Accuracy:0.9506 Error: 0.15459 Loss:0.14452 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.67s\n", - "1622528 Examples seen. Accuracy:0.9524 Error: 0.13068 Loss:0.13016 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.64s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1623168 Examples seen. Accuracy:0.9521 Error: 0.13584 Loss:0.11718 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.63s\n", - "1623808 Examples seen. Accuracy:0.9516 Error: 0.08862 Loss:0.06971 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.62s\n", - "1624448 Examples seen. Accuracy:0.9514 Error: 0.18071 Loss:0.20353 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.62s\n", - "1625088 Examples seen. Accuracy:0.9516 Error: 0.13334 Loss:0.11217 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.62s\n", - "1625728 Examples seen. Accuracy:0.9516 Error: 0.21728 Loss:0.29211 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.65s\n", - "1626368 Examples seen. Accuracy:0.9522 Error: 0.15522 Loss:0.24822 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.62s\n", - "1627008 Examples seen. Accuracy:0.9521 Error: 0.09664 Loss:0.10891 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.63s\n", - "1627648 Examples seen. Accuracy:0.9521 Error: 0.13974 Loss:0.20051 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.63s\n", - "1628288 Examples seen. Accuracy:0.9513 Error: 0.11770 Loss:0.17074 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.72s\n", - "1628928 Examples seen. Accuracy:0.9514 Error: 0.09608 Loss:0.07821 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.64s\n", - "1629568 Examples seen. 
Accuracy:0.9517 Error: 0.10634 Loss:0.07820 Threads: 8 Forward time: 5.08s Backward time: 3.26s Step time: 3.65s\n", - "1630208 Examples seen. Accuracy:0.9504 Error: 0.09921 Loss:0.07959 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.63s\n", - "1630848 Examples seen. Accuracy:0.9515 Error: 0.07357 Loss:0.07447 Threads: 8 Forward time: 5.02s Backward time: 3.30s Step time: 3.65s\n", - "1631488 Examples seen. Accuracy:0.9523 Error: 0.07915 Loss:0.10349 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.68s\n", - "1632128 Examples seen. Accuracy:0.9511 Error: 0.16141 Loss:0.19480 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.63s\n", - "1632768 Examples seen. Accuracy:0.9507 Error: 0.18076 Loss:0.26829 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.60s\n", - "1633408 Examples seen. Accuracy:0.9512 Error: 0.11791 Loss:0.12039 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.73s\n", - "1634048 Examples seen. Accuracy:0.9510 Error: 0.16509 Loss:0.13551 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.67s\n", - "1634688 Examples seen. Accuracy:0.9506 Error: 0.09375 Loss:0.15359 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.67s\n", - "1635328 Examples seen. Accuracy:0.9502 Error: 0.14889 Loss:0.28760 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.65s\n", - "1635968 Examples seen. Accuracy:0.9500 Error: 0.16107 Loss:0.17486 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.68s\n", - "1636608 Examples seen. Accuracy:0.9508 Error: 0.18205 Loss:0.29972 Threads: 8 Forward time: 5.10s Backward time: 3.31s Step time: 3.75s\n", - "1637248 Examples seen. Accuracy:0.9500 Error: 0.08575 Loss:0.06189 Threads: 8 Forward time: 5.07s Backward time: 3.31s Step time: 4.33s\n", - "1637888 Examples seen. 
Accuracy:0.9498 Error: 0.12046 Loss:0.14560 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.67s\n", - "1638528 Examples seen. Accuracy:0.9506 Error: 0.15807 Loss:0.18995 Threads: 8 Forward time: 5.04s Backward time: 3.22s Step time: 3.66s\n", - "1639168 Examples seen. Accuracy:0.9520 Error: 0.13134 Loss:0.10707 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.75s\n", - "1639808 Examples seen. Accuracy:0.9526 Error: 0.15300 Loss:0.12814 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.67s\n", - "1640448 Examples seen. Accuracy:0.9543 Error: 0.07585 Loss:0.09203 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.70s\n", - "1641088 Examples seen. Accuracy:0.9548 Error: 0.11588 Loss:0.09295 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.69s\n", - "1641728 Examples seen. Accuracy:0.9560 Error: 0.10694 Loss:0.14617 Threads: 8 Forward time: 4.98s Backward time: 3.26s Step time: 3.69s\n", - "1642368 Examples seen. Accuracy:0.9562 Error: 0.22664 Loss:0.23409 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.72s\n", - "1643008 Examples seen. Accuracy:0.9573 Error: 0.08852 Loss:0.06584 Threads: 8 Forward time: 5.12s Backward time: 3.28s Step time: 3.73s\n", - "1643648 Examples seen. Accuracy:0.9572 Error: 0.08380 Loss:0.05452 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.68s\n", - "1644288 Examples seen. Accuracy:0.9570 Error: 0.08202 Loss:0.08988 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.68s\n", - "1644928 Examples seen. Accuracy:0.9553 Error: 0.13404 Loss:0.13777 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.65s\n", - "1645568 Examples seen. Accuracy:0.9556 Error: 0.20865 Loss:0.26621 Threads: 8 Forward time: 5.15s Backward time: 3.28s Step time: 3.69s\n", - "1646208 Examples seen. 
Accuracy:0.9565 Error: 0.10476 Loss:0.06641 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.66s\n", - "Starting Validation.\n", - "Epochs: 33 Examples seen:1646832 Validation Accuracy: 0.9815 Validation Error: 0.0504 Validation Loss: 0.0528 Total time: 179.68min\n", - "Epoch time: 4.8 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 33. Working time: 2.99 hours.\n", - "1647472 Examples seen. Accuracy:0.9563 Error: 0.09850 Loss:0.08254 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.72s\n", - "1648112 Examples seen. Accuracy:0.9555 Error: 0.12796 Loss:0.15990 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.69s\n", - "1648752 Examples seen. Accuracy:0.9553 Error: 0.17558 Loss:0.18911 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.66s\n", - "1649392 Examples seen. Accuracy:0.9555 Error: 0.09734 Loss:0.06831 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.64s\n", - "1650032 Examples seen. Accuracy:0.9540 Error: 0.13836 Loss:0.15612 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.66s\n", - "1650672 Examples seen. Accuracy:0.9534 Error: 0.14649 Loss:0.14747 Threads: 8 Forward time: 4.92s Backward time: 3.22s Step time: 3.66s\n", - "1651312 Examples seen. Accuracy:0.9535 Error: 0.11542 Loss:0.08050 Threads: 8 Forward time: 4.94s Backward time: 3.19s Step time: 3.63s\n", - "1651952 Examples seen. Accuracy:0.9539 Error: 0.09480 Loss:0.09954 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.64s\n", - "1652592 Examples seen. Accuracy:0.9556 Error: 0.09973 Loss:0.07211 Threads: 8 Forward time: 5.08s Backward time: 3.25s Step time: 3.67s\n", - "1653232 Examples seen. Accuracy:0.9568 Error: 0.06803 Loss:0.10311 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "1653872 Examples seen. Accuracy:0.9560 Error: 0.19173 Loss:0.18893 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.63s\n", - "1654512 Examples seen. 
Accuracy:0.9567 Error: 0.10031 Loss:0.08963 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.66s\n", - "1655152 Examples seen. Accuracy:0.9564 Error: 0.09679 Loss:0.07225 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.65s\n", - "1655792 Examples seen. Accuracy:0.9570 Error: 0.10773 Loss:0.07797 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.66s\n", - "1656432 Examples seen. Accuracy:0.9566 Error: 0.04826 Loss:0.02962 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.66s\n", - "1657072 Examples seen. Accuracy:0.9571 Error: 0.05885 Loss:0.03571 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.66s\n", - "1657712 Examples seen. Accuracy:0.9570 Error: 0.14733 Loss:0.18574 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.78s\n", - "1658352 Examples seen. Accuracy:0.9572 Error: 0.08525 Loss:0.05966 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.69s\n", - "1658992 Examples seen. Accuracy:0.9569 Error: 0.10925 Loss:0.14533 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.69s\n", - "1659632 Examples seen. Accuracy:0.9568 Error: 0.16850 Loss:0.19956 Threads: 8 Forward time: 5.09s Backward time: 3.29s Step time: 3.72s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1660272 Examples seen. Accuracy:0.9551 Error: 0.19491 Loss:0.17583 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.69s\n", - "1660912 Examples seen. Accuracy:0.9551 Error: 0.13619 Loss:0.10465 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.69s\n", - "1661552 Examples seen. Accuracy:0.9559 Error: 0.05943 Loss:0.06035 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.65s\n", - "1662192 Examples seen. Accuracy:0.9562 Error: 0.09879 Loss:0.07463 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.66s\n", - "1662832 Examples seen. 
Accuracy:0.9552 Error: 0.05178 Loss:0.02908 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.66s\n", - "1663472 Examples seen. Accuracy:0.9548 Error: 0.16065 Loss:0.22986 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.66s\n", - "1664112 Examples seen. Accuracy:0.9549 Error: 0.07075 Loss:0.05845 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.64s\n", - "1664752 Examples seen. Accuracy:0.9549 Error: 0.08263 Loss:0.09076 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.65s\n", - "1665392 Examples seen. Accuracy:0.9549 Error: 0.11192 Loss:0.14379 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.66s\n", - "1666032 Examples seen. Accuracy:0.9544 Error: 0.10133 Loss:0.10255 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.65s\n", - "1666672 Examples seen. Accuracy:0.9541 Error: 0.16394 Loss:0.15335 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.65s\n", - "1667312 Examples seen. Accuracy:0.9531 Error: 0.18874 Loss:0.20483 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.64s\n", - "1667952 Examples seen. Accuracy:0.9545 Error: 0.10851 Loss:0.10826 Threads: 8 Forward time: 5.13s Backward time: 3.25s Step time: 3.66s\n", - "1668592 Examples seen. Accuracy:0.9539 Error: 0.20188 Loss:0.30046 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.67s\n", - "1669232 Examples seen. Accuracy:0.9534 Error: 0.09418 Loss:0.12630 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.72s\n", - "1669872 Examples seen. Accuracy:0.9526 Error: 0.10075 Loss:0.06494 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.67s\n", - "1670512 Examples seen. Accuracy:0.9534 Error: 0.10998 Loss:0.13485 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.69s\n", - "1671152 Examples seen. 
Accuracy:0.9526 Error: 0.14655 Loss:0.29125 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.69s\n", - "1671792 Examples seen. Accuracy:0.9514 Error: 0.08915 Loss:0.08607 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.70s\n", - "1672432 Examples seen. Accuracy:0.9512 Error: 0.14007 Loss:0.10383 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.69s\n", - "1673072 Examples seen. Accuracy:0.9511 Error: 0.11376 Loss:0.07999 Threads: 8 Forward time: 5.11s Backward time: 3.26s Step time: 3.72s\n", - "1673712 Examples seen. Accuracy:0.9517 Error: 0.23166 Loss:0.29188 Threads: 8 Forward time: 5.11s Backward time: 3.27s Step time: 3.75s\n", - "1674352 Examples seen. Accuracy:0.9515 Error: 0.05771 Loss:0.04650 Threads: 8 Forward time: 5.11s Backward time: 3.26s Step time: 3.78s\n", - "1674992 Examples seen. Accuracy:0.9521 Error: 0.12818 Loss:0.12640 Threads: 8 Forward time: 5.13s Backward time: 3.24s Step time: 3.72s\n", - "1675632 Examples seen. Accuracy:0.9524 Error: 0.04559 Loss:0.03854 Threads: 8 Forward time: 4.86s Backward time: 3.17s Step time: 3.74s\n", - "1676272 Examples seen. Accuracy:0.9520 Error: 0.14036 Loss:0.12077 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.67s\n", - "1676912 Examples seen. Accuracy:0.9529 Error: 0.08069 Loss:0.09086 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.66s\n", - "1677552 Examples seen. Accuracy:0.9521 Error: 0.13960 Loss:0.12760 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.65s\n", - "1678192 Examples seen. Accuracy:0.9529 Error: 0.14290 Loss:0.13268 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.66s\n", - "1678832 Examples seen. Accuracy:0.9529 Error: 0.16977 Loss:0.17442 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.70s\n", - "1679472 Examples seen. 
Accuracy:0.9542 Error: 0.13151 Loss:0.15229 Threads: 8 Forward time: 5.03s Backward time: 3.21s Step time: 3.69s\n", - "1680112 Examples seen. Accuracy:0.9535 Error: 0.13522 Loss:0.12510 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.63s\n", - "1680752 Examples seen. Accuracy:0.9547 Error: 0.18679 Loss:0.21029 Threads: 8 Forward time: 4.97s Backward time: 3.17s Step time: 3.65s\n", - "1681392 Examples seen. Accuracy:0.9538 Error: 0.08448 Loss:0.05118 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.67s\n", - "1682032 Examples seen. Accuracy:0.9540 Error: 0.11112 Loss:0.10243 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.66s\n", - "1682672 Examples seen. Accuracy:0.9533 Error: 0.18357 Loss:0.26247 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "1683312 Examples seen. Accuracy:0.9527 Error: 0.15348 Loss:0.19900 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.64s\n", - "1683952 Examples seen. Accuracy:0.9534 Error: 0.07557 Loss:0.05268 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.65s\n", - "1684592 Examples seen. Accuracy:0.9531 Error: 0.17874 Loss:0.15898 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.69s\n", - "1685232 Examples seen. Accuracy:0.9541 Error: 0.09773 Loss:0.10179 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.65s\n", - "1685872 Examples seen. Accuracy:0.9546 Error: 0.10309 Loss:0.08649 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.63s\n", - "1686512 Examples seen. Accuracy:0.9551 Error: 0.11693 Loss:0.10746 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.66s\n", - "1687152 Examples seen. Accuracy:0.9557 Error: 0.09098 Loss:0.07947 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.82s\n", - "1687792 Examples seen. 
Accuracy:0.9546 Error: 0.09234 Loss:0.07160 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.68s\n", - "1688432 Examples seen. Accuracy:0.9543 Error: 0.06248 Loss:0.03925 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.64s\n", - "1689072 Examples seen. Accuracy:0.9535 Error: 0.17118 Loss:0.18170 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.64s\n", - "1689712 Examples seen. Accuracy:0.9540 Error: 0.14168 Loss:0.11793 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.67s\n", - "1690352 Examples seen. Accuracy:0.9536 Error: 0.09648 Loss:0.09039 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.67s\n", - "1690992 Examples seen. Accuracy:0.9540 Error: 0.11581 Loss:0.12648 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.64s\n", - "1691632 Examples seen. Accuracy:0.9533 Error: 0.15335 Loss:0.11384 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.70s\n", - "1692272 Examples seen. Accuracy:0.9526 Error: 0.20354 Loss:0.17184 Threads: 8 Forward time: 5.07s Backward time: 3.31s Step time: 3.69s\n", - "1692912 Examples seen. Accuracy:0.9530 Error: 0.11656 Loss:0.08986 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.68s\n", - "1693552 Examples seen. Accuracy:0.9541 Error: 0.09202 Loss:0.10550 Threads: 8 Forward time: 4.94s Backward time: 3.27s Step time: 3.64s\n", - "1694192 Examples seen. Accuracy:0.9552 Error: 0.05833 Loss:0.03385 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.66s\n", - "1694832 Examples seen. Accuracy:0.9553 Error: 0.03285 Loss:0.02443 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.66s\n", - "1695472 Examples seen. Accuracy:0.9552 Error: 0.09947 Loss:0.06305 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.72s\n", - "1696112 Examples seen. 
Accuracy:0.9555 Error: 0.17509 Loss:0.30145 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.65s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 34 Examples seen:1696736 Validation Accuracy: 0.9833 Validation Error: 0.0488 Validation Loss: 0.0500 Total time: 184.93min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 34. Working time: 3.08 hours.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1697376 Examples seen. Accuracy:0.9564 Error: 0.12370 Loss:0.11274 Threads: 8 Forward time: 5.03s Backward time: 3.28s Step time: 4.00s\n", - "1698016 Examples seen. Accuracy:0.9581 Error: 0.06149 Loss:0.03997 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.75s\n", - "1698656 Examples seen. Accuracy:0.9563 Error: 0.11511 Loss:0.12596 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.72s\n", - "1699296 Examples seen. Accuracy:0.9575 Error: 0.04369 Loss:0.03368 Threads: 8 Forward time: 5.21s Backward time: 3.39s Step time: 3.78s\n", - "1699936 Examples seen. Accuracy:0.9581 Error: 0.14284 Loss:0.10989 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.73s\n", - "1700576 Examples seen. Accuracy:0.9573 Error: 0.12422 Loss:0.10745 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.69s\n", - "1701216 Examples seen. Accuracy:0.9565 Error: 0.07989 Loss:0.06614 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.68s\n", - "1701856 Examples seen. Accuracy:0.9572 Error: 0.03664 Loss:0.04892 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.69s\n", - "1702496 Examples seen. Accuracy:0.9562 Error: 0.08763 Loss:0.08473 Threads: 8 Forward time: 5.05s Backward time: 3.24s Step time: 3.71s\n", - "1703136 Examples seen. Accuracy:0.9564 Error: 0.09152 Loss:0.08435 Threads: 8 Forward time: 4.97s Backward time: 3.29s Step time: 3.72s\n", - "1703776 Examples seen. 
Accuracy:0.9564 Error: 0.15905 Loss:0.19017 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.61s\n", - "1704416 Examples seen. Accuracy:0.9566 Error: 0.02629 Loss:0.01696 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.62s\n", - "1705056 Examples seen. Accuracy:0.9575 Error: 0.10146 Loss:0.06423 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.63s\n", - "1705696 Examples seen. Accuracy:0.9574 Error: 0.11619 Loss:0.15490 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.64s\n", - "1706336 Examples seen. Accuracy:0.9579 Error: 0.09154 Loss:0.06040 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.66s\n", - "1706976 Examples seen. Accuracy:0.9571 Error: 0.09999 Loss:0.09077 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.65s\n", - "1707616 Examples seen. Accuracy:0.9577 Error: 0.10589 Loss:0.12010 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.67s\n", - "1708256 Examples seen. Accuracy:0.9556 Error: 0.09545 Loss:0.12080 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.65s\n", - "1708896 Examples seen. Accuracy:0.9525 Error: 0.18734 Loss:0.30184 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.66s\n", - "1709536 Examples seen. Accuracy:0.9527 Error: 0.09554 Loss:0.07707 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.65s\n", - "1710176 Examples seen. Accuracy:0.9503 Error: 0.17724 Loss:0.19445 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.67s\n", - "1710816 Examples seen. Accuracy:0.9512 Error: 0.05224 Loss:0.02879 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.67s\n", - "1711456 Examples seen. Accuracy:0.9518 Error: 0.16336 Loss:0.15816 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.65s\n", - "1712096 Examples seen. 
Accuracy:0.9521 Error: 0.12217 Loss:0.11052 Threads: 8 Forward time: 5.08s Backward time: 3.27s Step time: 3.69s\n", - "1712736 Examples seen. Accuracy:0.9515 Error: 0.16639 Loss:0.16000 Threads: 8 Forward time: 5.14s Backward time: 3.32s Step time: 3.77s\n", - "1713376 Examples seen. Accuracy:0.9515 Error: 0.06803 Loss:0.05030 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.68s\n", - "1714016 Examples seen. Accuracy:0.9513 Error: 0.11856 Loss:0.09933 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.67s\n", - "1714656 Examples seen. Accuracy:0.9522 Error: 0.07996 Loss:0.06198 Threads: 8 Forward time: 5.10s Backward time: 3.29s Step time: 3.69s\n", - "1715296 Examples seen. Accuracy:0.9532 Error: 0.14122 Loss:0.14930 Threads: 8 Forward time: 4.92s Backward time: 3.22s Step time: 3.70s\n", - "1715936 Examples seen. Accuracy:0.9548 Error: 0.13314 Loss:0.10416 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.65s\n", - "1716576 Examples seen. Accuracy:0.9564 Error: 0.07180 Loss:0.04883 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.82s\n", - "1717216 Examples seen. Accuracy:0.9568 Error: 0.14105 Loss:0.12290 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.79s\n", - "1717856 Examples seen. Accuracy:0.9571 Error: 0.12672 Loss:0.13729 Threads: 8 Forward time: 5.01s Backward time: 3.27s Step time: 3.73s\n", - "1718496 Examples seen. Accuracy:0.9586 Error: 0.08768 Loss:0.06487 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.74s\n", - "1719136 Examples seen. Accuracy:0.9579 Error: 0.14699 Loss:0.17922 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.68s\n", - "1719776 Examples seen. Accuracy:0.9570 Error: 0.16219 Loss:0.16167 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.68s\n", - "1720416 Examples seen. 
Accuracy:0.9570 Error: 0.15683 Loss:0.13489 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.72s\n", - "1721056 Examples seen. Accuracy:0.9565 Error: 0.21208 Loss:0.26963 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.70s\n", - "1721696 Examples seen. Accuracy:0.9550 Error: 0.15612 Loss:0.15786 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.71s\n", - "1722336 Examples seen. Accuracy:0.9545 Error: 0.12294 Loss:0.12137 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.72s\n", - "1722976 Examples seen. Accuracy:0.9531 Error: 0.17181 Loss:0.18947 Threads: 8 Forward time: 5.09s Backward time: 3.31s Step time: 3.68s\n", - "1723616 Examples seen. Accuracy:0.9524 Error: 0.10175 Loss:0.08284 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.67s\n", - "1724256 Examples seen. Accuracy:0.9523 Error: 0.07619 Loss:0.09009 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.67s\n", - "1724896 Examples seen. Accuracy:0.9531 Error: 0.12681 Loss:0.10745 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.68s\n", - "1725536 Examples seen. Accuracy:0.9534 Error: 0.12187 Loss:0.10131 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.66s\n", - "1726176 Examples seen. Accuracy:0.9545 Error: 0.13699 Loss:0.09375 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.72s\n", - "1726816 Examples seen. Accuracy:0.9556 Error: 0.10385 Loss:0.07738 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.69s\n", - "1727456 Examples seen. Accuracy:0.9557 Error: 0.14743 Loss:0.20727 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.70s\n", - "1728096 Examples seen. Accuracy:0.9574 Error: 0.06703 Loss:0.05142 Threads: 8 Forward time: 5.05s Backward time: 3.29s Step time: 3.70s\n", - "1728736 Examples seen. 
Accuracy:0.9589 Error: 0.08880 Loss:0.08136 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.73s\n", - "1729376 Examples seen. Accuracy:0.9601 Error: 0.06884 Loss:0.04845 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.67s\n", - "1730016 Examples seen. Accuracy:0.9594 Error: 0.09164 Loss:0.05929 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.70s\n", - "1730656 Examples seen. Accuracy:0.9588 Error: 0.12068 Loss:0.12526 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.70s\n", - "1731296 Examples seen. Accuracy:0.9577 Error: 0.13381 Loss:0.11213 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.75s\n", - "1731936 Examples seen. Accuracy:0.9586 Error: 0.09838 Loss:0.09358 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.72s\n", - "1732576 Examples seen. Accuracy:0.9588 Error: 0.09802 Loss:0.10131 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.73s\n", - "1733216 Examples seen. Accuracy:0.9582 Error: 0.17533 Loss:0.31308 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.72s\n", - "1733856 Examples seen. Accuracy:0.9588 Error: 0.09843 Loss:0.08373 Threads: 8 Forward time: 5.07s Backward time: 3.28s Step time: 3.73s\n", - "1734496 Examples seen. Accuracy:0.9592 Error: 0.09206 Loss:0.06744 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.70s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1735136 Examples seen. Accuracy:0.9586 Error: 0.21750 Loss:0.23582 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.73s\n", - "1735776 Examples seen. Accuracy:0.9582 Error: 0.14068 Loss:0.17215 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.74s\n", - "1736416 Examples seen. Accuracy:0.9586 Error: 0.10147 Loss:0.11270 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.69s\n", - "1737056 Examples seen. 
Accuracy:0.9575 Error: 0.09361 Loss:0.06872 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.69s\n", - "1737696 Examples seen. Accuracy:0.9566 Error: 0.18389 Loss:0.18223 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.69s\n", - "1738336 Examples seen. Accuracy:0.9567 Error: 0.10437 Loss:0.08243 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.69s\n", - "1738976 Examples seen. Accuracy:0.9567 Error: 0.13030 Loss:0.11569 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.67s\n", - "1739616 Examples seen. Accuracy:0.9581 Error: 0.08965 Loss:0.10235 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "1740256 Examples seen. Accuracy:0.9588 Error: 0.11732 Loss:0.07823 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.67s\n", - "1740896 Examples seen. Accuracy:0.9586 Error: 0.19713 Loss:0.20001 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.66s\n", - "1741536 Examples seen. Accuracy:0.9599 Error: 0.08251 Loss:0.05678 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.65s\n", - "1742176 Examples seen. Accuracy:0.9589 Error: 0.08973 Loss:0.06632 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.68s\n", - "1742816 Examples seen. Accuracy:0.9584 Error: 0.06525 Loss:0.04217 Threads: 8 Forward time: 5.04s Backward time: 3.28s Step time: 3.72s\n", - "1743456 Examples seen. Accuracy:0.9584 Error: 0.10065 Loss:0.07313 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.64s\n", - "1744096 Examples seen. Accuracy:0.9562 Error: 0.14727 Loss:0.16625 Threads: 8 Forward time: 4.92s Backward time: 3.18s Step time: 3.64s\n", - "1744736 Examples seen. Accuracy:0.9566 Error: 0.11290 Loss:0.06894 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.68s\n", - "1745376 Examples seen. 
Accuracy:0.9551 Error: 0.19128 Loss:0.26201 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.66s\n", - "1746016 Examples seen. Accuracy:0.9553 Error: 0.12488 Loss:0.10196 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.60s\n", - "Starting Validation.\n", - "Epochs: 35 Examples seen:1746640 Validation Accuracy: 0.9833 Validation Error: 0.0476 Validation Loss: 0.0481 Total time: 190.20min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.8 hours.\n", - "Epochs: 35. Working time: 3.17 hours.\n", - "1747280 Examples seen. Accuracy:0.9564 Error: 0.14957 Loss:0.14683 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.73s\n", - "1747920 Examples seen. Accuracy:0.9557 Error: 0.13448 Loss:0.13733 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.68s\n", - "1748560 Examples seen. Accuracy:0.9556 Error: 0.04399 Loss:0.02454 Threads: 8 Forward time: 5.06s Backward time: 3.26s Step time: 3.69s\n", - "1749200 Examples seen. Accuracy:0.9563 Error: 0.13650 Loss:0.13516 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.66s\n", - "1749840 Examples seen. Accuracy:0.9564 Error: 0.11992 Loss:0.11011 Threads: 8 Forward time: 5.03s Backward time: 3.21s Step time: 3.66s\n", - "1750480 Examples seen. Accuracy:0.9562 Error: 0.13378 Loss:0.14236 Threads: 8 Forward time: 5.10s Backward time: 3.24s Step time: 3.82s\n", - "1751120 Examples seen. Accuracy:0.9559 Error: 0.07769 Loss:0.06909 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.63s\n", - "1751760 Examples seen. Accuracy:0.9555 Error: 0.14064 Loss:0.14833 Threads: 8 Forward time: 4.97s Backward time: 3.19s Step time: 3.61s\n", - "1752400 Examples seen. Accuracy:0.9556 Error: 0.18615 Loss:0.25593 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "1753040 Examples seen. Accuracy:0.9567 Error: 0.07653 Loss:0.04897 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.70s\n", - "1753680 Examples seen. 
Accuracy:0.9565 Error: 0.11759 Loss:0.11167 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.61s\n", - "1754320 Examples seen. Accuracy:0.9561 Error: 0.08570 Loss:0.10862 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.61s\n", - "1754960 Examples seen. Accuracy:0.9550 Error: 0.17914 Loss:0.19847 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.62s\n", - "1755600 Examples seen. Accuracy:0.9551 Error: 0.09134 Loss:0.12681 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.62s\n", - "1756240 Examples seen. Accuracy:0.9555 Error: 0.08776 Loss:0.06581 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.63s\n", - "1756880 Examples seen. Accuracy:0.9556 Error: 0.07670 Loss:0.08249 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.64s\n", - "1757520 Examples seen. Accuracy:0.9538 Error: 0.14102 Loss:0.10427 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.61s\n", - "1758160 Examples seen. Accuracy:0.9532 Error: 0.19786 Loss:0.22957 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.61s\n", - "1758800 Examples seen. Accuracy:0.9539 Error: 0.07255 Loss:0.07461 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.63s\n", - "1759440 Examples seen. Accuracy:0.9539 Error: 0.07666 Loss:0.09994 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.63s\n", - "1760080 Examples seen. Accuracy:0.9549 Error: 0.12176 Loss:0.08088 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.65s\n", - "1760720 Examples seen. Accuracy:0.9543 Error: 0.10170 Loss:0.09934 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.61s\n", - "1761360 Examples seen. Accuracy:0.9542 Error: 0.10327 Loss:0.08137 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.64s\n", - "1762000 Examples seen. 
Accuracy:0.9544 Error: 0.09634 Loss:0.14334 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.62s\n", - "1762640 Examples seen. Accuracy:0.9541 Error: 0.12729 Loss:0.11145 Threads: 8 Forward time: 4.92s Backward time: 3.19s Step time: 3.67s\n", - "1763280 Examples seen. Accuracy:0.9551 Error: 0.14049 Loss:0.16942 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.62s\n", - "1763920 Examples seen. Accuracy:0.9539 Error: 0.08826 Loss:0.07529 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.65s\n", - "1764560 Examples seen. Accuracy:0.9541 Error: 0.16872 Loss:0.18725 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.63s\n", - "1765200 Examples seen. Accuracy:0.9546 Error: 0.03819 Loss:0.02531 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.82s\n", - "1765840 Examples seen. Accuracy:0.9549 Error: 0.09602 Loss:0.09509 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.65s\n", - "1766480 Examples seen. Accuracy:0.9561 Error: 0.11040 Loss:0.16706 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.64s\n", - "1767120 Examples seen. Accuracy:0.9566 Error: 0.14064 Loss:0.18925 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.66s\n", - "1767760 Examples seen. Accuracy:0.9559 Error: 0.17949 Loss:0.23182 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.68s\n", - "1768400 Examples seen. Accuracy:0.9551 Error: 0.06496 Loss:0.05117 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.67s\n", - "1769040 Examples seen. Accuracy:0.9542 Error: 0.15961 Loss:0.17385 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.62s\n", - "1769680 Examples seen. Accuracy:0.9538 Error: 0.08835 Loss:0.07721 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.61s\n", - "1770320 Examples seen. 
Accuracy:0.9535 Error: 0.15800 Loss:0.13758 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.62s\n", - "1770960 Examples seen. Accuracy:0.9544 Error: 0.06439 Loss:0.04447 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.63s\n", - "1771600 Examples seen. Accuracy:0.9544 Error: 0.06153 Loss:0.04529 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.62s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1772240 Examples seen. Accuracy:0.9546 Error: 0.08856 Loss:0.05875 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.61s\n", - "1772880 Examples seen. Accuracy:0.9546 Error: 0.11530 Loss:0.17189 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.66s\n", - "1773520 Examples seen. Accuracy:0.9545 Error: 0.08569 Loss:0.12847 Threads: 8 Forward time: 4.88s Backward time: 3.21s Step time: 3.70s\n", - "1774160 Examples seen. Accuracy:0.9552 Error: 0.13763 Loss:0.09882 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.60s\n", - "1774800 Examples seen. Accuracy:0.9545 Error: 0.15904 Loss:0.15691 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.65s\n", - "1775440 Examples seen. Accuracy:0.9547 Error: 0.10099 Loss:0.20617 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.62s\n", - "1776080 Examples seen. Accuracy:0.9555 Error: 0.08378 Loss:0.05610 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.61s\n", - "1776720 Examples seen. Accuracy:0.9565 Error: 0.15303 Loss:0.18980 Threads: 8 Forward time: 4.90s Backward time: 3.22s Step time: 3.60s\n", - "1777360 Examples seen. Accuracy:0.9561 Error: 0.08778 Loss:0.05999 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.60s\n", - "1778000 Examples seen. Accuracy:0.9563 Error: 0.10252 Loss:0.19915 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.62s\n", - "1778640 Examples seen. 
Accuracy:0.9563 Error: 0.10759 Loss:0.11619 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "1779280 Examples seen. Accuracy:0.9564 Error: 0.10677 Loss:0.06706 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.63s\n", - "1779920 Examples seen. Accuracy:0.9573 Error: 0.03707 Loss:0.02065 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.61s\n", - "1780560 Examples seen. Accuracy:0.9578 Error: 0.06614 Loss:0.06260 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.61s\n", - "1781200 Examples seen. Accuracy:0.9575 Error: 0.14578 Loss:0.15337 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.62s\n", - "1781840 Examples seen. Accuracy:0.9564 Error: 0.14160 Loss:0.11122 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.62s\n", - "1782480 Examples seen. Accuracy:0.9557 Error: 0.18747 Loss:0.18676 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.62s\n", - "1783120 Examples seen. Accuracy:0.9561 Error: 0.07407 Loss:0.05903 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.61s\n", - "1783760 Examples seen. Accuracy:0.9572 Error: 0.07691 Loss:0.14365 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.68s\n", - "1784400 Examples seen. Accuracy:0.9573 Error: 0.21152 Loss:0.29385 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.69s\n", - "1785040 Examples seen. Accuracy:0.9568 Error: 0.14541 Loss:0.23907 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "1785680 Examples seen. Accuracy:0.9561 Error: 0.09648 Loss:0.08535 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.65s\n", - "1786320 Examples seen. Accuracy:0.9560 Error: 0.07973 Loss:0.06297 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.67s\n", - "1786960 Examples seen. 
Accuracy:0.9550 Error: 0.14013 Loss:0.14395 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.69s\n", - "1787600 Examples seen. Accuracy:0.9557 Error: 0.09567 Loss:0.06545 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.63s\n", - "1788240 Examples seen. Accuracy:0.9554 Error: 0.05980 Loss:0.03617 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.65s\n", - "1788880 Examples seen. Accuracy:0.9549 Error: 0.07754 Loss:0.08508 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.65s\n", - "1789520 Examples seen. Accuracy:0.9551 Error: 0.05611 Loss:0.04648 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.65s\n", - "1790160 Examples seen. Accuracy:0.9543 Error: 0.17148 Loss:0.23611 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.73s\n", - "1790800 Examples seen. Accuracy:0.9545 Error: 0.07240 Loss:0.05229 Threads: 8 Forward time: 5.06s Backward time: 3.23s Step time: 3.75s\n", - "1791440 Examples seen. Accuracy:0.9560 Error: 0.17142 Loss:0.22017 Threads: 8 Forward time: 5.06s Backward time: 3.23s Step time: 3.81s\n", - "1792080 Examples seen. Accuracy:0.9574 Error: 0.14793 Loss:0.10845 Threads: 8 Forward time: 5.07s Backward time: 3.23s Step time: 3.69s\n", - "1792720 Examples seen. Accuracy:0.9576 Error: 0.07952 Loss:0.06775 Threads: 8 Forward time: 5.06s Backward time: 3.25s Step time: 3.66s\n", - "1793360 Examples seen. Accuracy:0.9568 Error: 0.19621 Loss:0.21966 Threads: 8 Forward time: 5.03s Backward time: 3.21s Step time: 3.64s\n", - "1794000 Examples seen. Accuracy:0.9568 Error: 0.09204 Loss:0.13662 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "1794640 Examples seen. Accuracy:0.9559 Error: 0.14270 Loss:0.12991 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.64s\n", - "1795280 Examples seen. 
Accuracy:0.9555 Error: 0.06584 Loss:0.08834 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "1795920 Examples seen. Accuracy:0.9565 Error: 0.08207 Loss:0.06582 Threads: 8 Forward time: 4.93s Backward time: 3.20s Step time: 3.60s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 36 Examples seen:1796544 Validation Accuracy: 0.9841 Validation Error: 0.0469 Validation Loss: 0.0469 Total time: 195.42min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.8 hours.\n", - "Epochs: 36. Working time: 3.26 hours.\n", - "1797184 Examples seen. Accuracy:0.9564 Error: 0.11982 Loss:0.12715 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.78s\n", - "1797824 Examples seen. Accuracy:0.9573 Error: 0.06619 Loss:0.04556 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.66s\n", - "1798464 Examples seen. Accuracy:0.9571 Error: 0.18513 Loss:0.17599 Threads: 8 Forward time: 5.05s Backward time: 3.22s Step time: 3.65s\n", - "1799104 Examples seen. Accuracy:0.9559 Error: 0.15037 Loss:0.23883 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.70s\n", - "1799744 Examples seen. Accuracy:0.9545 Error: 0.10704 Loss:0.12288 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.70s\n", - "1800384 Examples seen. Accuracy:0.9549 Error: 0.12290 Loss:0.14029 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.64s\n", - "1801024 Examples seen. Accuracy:0.9547 Error: 0.09161 Loss:0.06599 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.67s\n", - "1801664 Examples seen. Accuracy:0.9550 Error: 0.07423 Loss:0.08698 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.65s\n", - "1802304 Examples seen. Accuracy:0.9547 Error: 0.13005 Loss:0.10568 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.70s\n", - "1802944 Examples seen. 
Accuracy:0.9549 Error: 0.23751 Loss:0.30969 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.68s\n", - "1803584 Examples seen. Accuracy:0.9554 Error: 0.16388 Loss:0.16303 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.67s\n", - "1804224 Examples seen. Accuracy:0.9546 Error: 0.11821 Loss:0.08422 Threads: 8 Forward time: 5.05s Backward time: 3.28s Step time: 3.76s\n", - "1804864 Examples seen. Accuracy:0.9531 Error: 0.14995 Loss:0.14358 Threads: 8 Forward time: 5.16s Backward time: 3.30s Step time: 3.82s\n", - "1805504 Examples seen. Accuracy:0.9529 Error: 0.08595 Loss:0.07757 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.78s\n", - "1806144 Examples seen. Accuracy:0.9524 Error: 0.16161 Loss:0.17668 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.74s\n", - "1806784 Examples seen. Accuracy:0.9526 Error: 0.06927 Loss:0.04835 Threads: 8 Forward time: 5.02s Backward time: 3.20s Step time: 3.71s\n", - "1807424 Examples seen. Accuracy:0.9539 Error: 0.07400 Loss:0.04514 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.67s\n", - "1808064 Examples seen. Accuracy:0.9542 Error: 0.07363 Loss:0.06349 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.74s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1808704 Examples seen. Accuracy:0.9546 Error: 0.04456 Loss:0.02821 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.70s\n", - "1809344 Examples seen. Accuracy:0.9550 Error: 0.11859 Loss:0.11527 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.68s\n", - "1809984 Examples seen. Accuracy:0.9552 Error: 0.09323 Loss:0.06274 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.68s\n", - "1810624 Examples seen. Accuracy:0.9536 Error: 0.09031 Loss:0.08418 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.72s\n", - "1811264 Examples seen. 
Accuracy:0.9541 Error: 0.09636 Loss:0.09265 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.75s\n", - "1811904 Examples seen. Accuracy:0.9546 Error: 0.08280 Loss:0.06800 Threads: 8 Forward time: 5.08s Backward time: 3.25s Step time: 3.81s\n", - "1812544 Examples seen. Accuracy:0.9551 Error: 0.03604 Loss:0.02002 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.70s\n", - "1813184 Examples seen. Accuracy:0.9557 Error: 0.05730 Loss:0.03895 Threads: 8 Forward time: 5.19s Backward time: 3.27s Step time: 3.80s\n", - "1813824 Examples seen. Accuracy:0.9549 Error: 0.13987 Loss:0.20327 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.77s\n", - "1814464 Examples seen. Accuracy:0.9551 Error: 0.12910 Loss:0.13366 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.70s\n", - "1815104 Examples seen. Accuracy:0.9548 Error: 0.10973 Loss:0.19730 Threads: 8 Forward time: 5.01s Backward time: 3.19s Step time: 3.69s\n", - "1815744 Examples seen. Accuracy:0.9545 Error: 0.20201 Loss:0.26006 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.69s\n", - "1816384 Examples seen. Accuracy:0.9554 Error: 0.06838 Loss:0.09450 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.71s\n", - "1817024 Examples seen. Accuracy:0.9556 Error: 0.09218 Loss:0.14568 Threads: 8 Forward time: 5.16s Backward time: 3.33s Step time: 3.74s\n", - "1817664 Examples seen. Accuracy:0.9565 Error: 0.10247 Loss:0.08514 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.72s\n", - "1818304 Examples seen. Accuracy:0.9568 Error: 0.09302 Loss:0.07729 Threads: 8 Forward time: 5.06s Backward time: 3.24s Step time: 3.71s\n", - "1818944 Examples seen. Accuracy:0.9556 Error: 0.10434 Loss:0.10841 Threads: 8 Forward time: 5.16s Backward time: 3.30s Step time: 3.81s\n", - "1819584 Examples seen. 
Accuracy:0.9547 Error: 0.15566 Loss:0.17934 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.67s\n", - "1820224 Examples seen. Accuracy:0.9534 Error: 0.09787 Loss:0.09881 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.68s\n", - "1820864 Examples seen. Accuracy:0.9548 Error: 0.09613 Loss:0.08051 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.73s\n", - "1821504 Examples seen. Accuracy:0.9548 Error: 0.10382 Loss:0.17015 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.62s\n", - "1822144 Examples seen. Accuracy:0.9555 Error: 0.08964 Loss:0.10181 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.64s\n", - "1822784 Examples seen. Accuracy:0.9545 Error: 0.14913 Loss:0.18329 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.61s\n", - "1823424 Examples seen. Accuracy:0.9543 Error: 0.12083 Loss:0.09757 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.62s\n", - "1824064 Examples seen. Accuracy:0.9546 Error: 0.10317 Loss:0.09144 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.62s\n", - "1824704 Examples seen. Accuracy:0.9549 Error: 0.13307 Loss:0.14256 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.60s\n", - "1825344 Examples seen. Accuracy:0.9548 Error: 0.06797 Loss:0.07932 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.62s\n", - "1825984 Examples seen. Accuracy:0.9549 Error: 0.10097 Loss:0.08659 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.61s\n", - "1826624 Examples seen. Accuracy:0.9549 Error: 0.17762 Loss:0.19080 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.61s\n", - "1827264 Examples seen. Accuracy:0.9541 Error: 0.15394 Loss:0.11577 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "1827904 Examples seen. 
Accuracy:0.9540 Error: 0.15268 Loss:0.13558 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.67s\n", - "1828544 Examples seen. Accuracy:0.9533 Error: 0.11208 Loss:0.11845 Threads: 8 Forward time: 5.09s Backward time: 3.27s Step time: 3.85s\n", - "1829184 Examples seen. Accuracy:0.9534 Error: 0.12593 Loss:0.12251 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.69s\n", - "1829824 Examples seen. Accuracy:0.9540 Error: 0.11013 Loss:0.07358 Threads: 8 Forward time: 5.09s Backward time: 3.23s Step time: 3.73s\n", - "1830464 Examples seen. Accuracy:0.9543 Error: 0.15936 Loss:0.15270 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.76s\n", - "1831104 Examples seen. Accuracy:0.9548 Error: 0.09720 Loss:0.11979 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.76s\n", - "1831744 Examples seen. Accuracy:0.9542 Error: 0.09560 Loss:0.08874 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.64s\n", - "1832384 Examples seen. Accuracy:0.9550 Error: 0.04208 Loss:0.02491 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.85s\n", - "1833024 Examples seen. Accuracy:0.9549 Error: 0.15678 Loss:0.17918 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.70s\n", - "1833664 Examples seen. Accuracy:0.9546 Error: 0.03412 Loss:0.01928 Threads: 8 Forward time: 5.26s Backward time: 3.34s Step time: 3.78s\n", - "1834304 Examples seen. Accuracy:0.9548 Error: 0.15983 Loss:0.18012 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.72s\n", - "1834944 Examples seen. Accuracy:0.9550 Error: 0.07554 Loss:0.08495 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.63s\n", - "1835584 Examples seen. Accuracy:0.9569 Error: 0.21244 Loss:0.28960 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.62s\n", - "1836224 Examples seen. 
Accuracy:0.9571 Error: 0.18464 Loss:0.29235 Threads: 8 Forward time: 4.93s Backward time: 3.20s Step time: 3.62s\n", - "1836864 Examples seen. Accuracy:0.9554 Error: 0.14169 Loss:0.16778 Threads: 8 Forward time: 4.94s Backward time: 3.20s Step time: 3.67s\n", - "1837504 Examples seen. Accuracy:0.9545 Error: 0.12011 Loss:0.13473 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.70s\n", - "1838144 Examples seen. Accuracy:0.9538 Error: 0.12203 Loss:0.12402 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.65s\n", - "1838784 Examples seen. Accuracy:0.9543 Error: 0.10910 Loss:0.07847 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.62s\n", - "1839424 Examples seen. Accuracy:0.9547 Error: 0.05406 Loss:0.03135 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.61s\n", - "1840064 Examples seen. Accuracy:0.9546 Error: 0.09812 Loss:0.07594 Threads: 8 Forward time: 5.02s Backward time: 3.19s Step time: 3.61s\n", - "1840704 Examples seen. Accuracy:0.9543 Error: 0.13101 Loss:0.13506 Threads: 8 Forward time: 5.07s Backward time: 3.21s Step time: 3.68s\n", - "1841344 Examples seen. Accuracy:0.9551 Error: 0.14208 Loss:0.13934 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.68s\n", - "1841984 Examples seen. Accuracy:0.9564 Error: 0.18013 Loss:0.20877 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.72s\n", - "1842624 Examples seen. Accuracy:0.9565 Error: 0.06567 Loss:0.05223 Threads: 8 Forward time: 4.92s Backward time: 3.17s Step time: 3.60s\n", - "1843264 Examples seen. Accuracy:0.9563 Error: 0.06501 Loss:0.08944 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.64s\n", - "1843904 Examples seen. Accuracy:0.9570 Error: 0.09017 Loss:0.07286 Threads: 8 Forward time: 4.95s Backward time: 3.19s Step time: 3.61s\n", - "1844544 Examples seen. 
Accuracy:0.9574 Error: 0.11758 Loss:0.14370 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.62s\n", - "1845184 Examples seen. Accuracy:0.9570 Error: 0.09450 Loss:0.08837 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.62s\n", - "1845824 Examples seen. Accuracy:0.9568 Error: 0.08396 Loss:0.09636 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.66s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Starting Validation.\n", - "Epochs: 37 Examples seen:1846448 Validation Accuracy: 0.9841 Validation Error: 0.0461 Validation Loss: 0.0458 Total time: 200.69min\n", - "Epoch time: 4.8 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 37. Working time: 3.34 hours.\n", - "1847088 Examples seen. Accuracy:0.9562 Error: 0.11819 Loss:0.10164 Threads: 8 Forward time: 5.09s Backward time: 3.23s Step time: 3.93s\n", - "1847728 Examples seen. Accuracy:0.9574 Error: 0.10055 Loss:0.09452 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.64s\n", - "1848368 Examples seen. Accuracy:0.9561 Error: 0.09441 Loss:0.12030 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.63s\n", - "1849008 Examples seen. Accuracy:0.9565 Error: 0.08285 Loss:0.09775 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "1849648 Examples seen. Accuracy:0.9560 Error: 0.11474 Loss:0.07778 Threads: 8 Forward time: 4.95s Backward time: 3.18s Step time: 3.66s\n", - "1850288 Examples seen. Accuracy:0.9552 Error: 0.13941 Loss:0.16255 Threads: 8 Forward time: 5.02s Backward time: 3.17s Step time: 3.67s\n", - "1850928 Examples seen. Accuracy:0.9540 Error: 0.16867 Loss:0.21479 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.70s\n", - "1851568 Examples seen. Accuracy:0.9552 Error: 0.06381 Loss:0.05751 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.68s\n", - "1852208 Examples seen. 
Accuracy:0.9542 Error: 0.16486 Loss:0.15809 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.67s\n", - "1852848 Examples seen. Accuracy:0.9545 Error: 0.13323 Loss:0.13841 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.68s\n", - "1853488 Examples seen. Accuracy:0.9561 Error: 0.15190 Loss:0.13007 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.71s\n", - "1854128 Examples seen. Accuracy:0.9550 Error: 0.09864 Loss:0.09541 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.69s\n", - "1854768 Examples seen. Accuracy:0.9544 Error: 0.10317 Loss:0.09194 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.69s\n", - "1855408 Examples seen. Accuracy:0.9553 Error: 0.08703 Loss:0.08090 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.70s\n", - "1856048 Examples seen. Accuracy:0.9551 Error: 0.08102 Loss:0.05976 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.69s\n", - "1856688 Examples seen. Accuracy:0.9552 Error: 0.17611 Loss:0.20903 Threads: 8 Forward time: 5.07s Backward time: 3.23s Step time: 3.73s\n", - "1857328 Examples seen. Accuracy:0.9563 Error: 0.11412 Loss:0.19433 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.70s\n", - "1857968 Examples seen. Accuracy:0.9551 Error: 0.12622 Loss:0.10813 Threads: 8 Forward time: 5.07s Backward time: 3.25s Step time: 3.71s\n", - "1858608 Examples seen. Accuracy:0.9548 Error: 0.08798 Loss:0.07007 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.78s\n", - "1859248 Examples seen. Accuracy:0.9554 Error: 0.11709 Loss:0.11175 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.71s\n", - "1859888 Examples seen. Accuracy:0.9565 Error: 0.07906 Loss:0.08237 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.75s\n", - "1860528 Examples seen. 
Accuracy:0.9572 Error: 0.09042 Loss:0.11210 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.68s\n", - "1861168 Examples seen. Accuracy:0.9572 Error: 0.11350 Loss:0.13660 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.67s\n", - "1861808 Examples seen. Accuracy:0.9574 Error: 0.10039 Loss:0.10602 Threads: 8 Forward time: 5.01s Backward time: 3.19s Step time: 3.67s\n", - "1862448 Examples seen. Accuracy:0.9574 Error: 0.07011 Loss:0.07470 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.63s\n", - "1863088 Examples seen. Accuracy:0.9576 Error: 0.00929 Loss:0.00472 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.65s\n", - "1863728 Examples seen. Accuracy:0.9566 Error: 0.12854 Loss:0.09041 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.69s\n", - "1864368 Examples seen. Accuracy:0.9562 Error: 0.08720 Loss:0.08158 Threads: 8 Forward time: 5.05s Backward time: 3.22s Step time: 3.64s\n", - "1865008 Examples seen. Accuracy:0.9568 Error: 0.07177 Loss:0.04339 Threads: 8 Forward time: 5.12s Backward time: 3.26s Step time: 3.65s\n", - "1865648 Examples seen. Accuracy:0.9572 Error: 0.14646 Loss:0.15084 Threads: 8 Forward time: 5.08s Backward time: 3.26s Step time: 3.68s\n", - "1866288 Examples seen. Accuracy:0.9568 Error: 0.09418 Loss:0.11620 Threads: 8 Forward time: 5.63s Backward time: 3.60s Step time: 3.83s\n", - "1866928 Examples seen. Accuracy:0.9583 Error: 0.10999 Loss:0.07401 Threads: 8 Forward time: 5.09s Backward time: 3.25s Step time: 3.66s\n", - "1867568 Examples seen. Accuracy:0.9588 Error: 0.14740 Loss:0.15679 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.72s\n", - "1868208 Examples seen. Accuracy:0.9577 Error: 0.11466 Loss:0.14010 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.75s\n", - "1868848 Examples seen. 
Accuracy:0.9568 Error: 0.21458 Loss:0.22418 Threads: 8 Forward time: 4.97s Backward time: 3.16s Step time: 3.69s\n", - "1869488 Examples seen. Accuracy:0.9578 Error: 0.12067 Loss:0.14627 Threads: 8 Forward time: 4.98s Backward time: 3.18s Step time: 3.66s\n", - "1870128 Examples seen. Accuracy:0.9573 Error: 0.15151 Loss:0.10631 Threads: 8 Forward time: 5.05s Backward time: 3.21s Step time: 3.68s\n", - "1870768 Examples seen. Accuracy:0.9579 Error: 0.14390 Loss:0.16780 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.71s\n", - "1871408 Examples seen. Accuracy:0.9572 Error: 0.08003 Loss:0.05318 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.69s\n", - "1872048 Examples seen. Accuracy:0.9574 Error: 0.06995 Loss:0.05608 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.69s\n", - "1872688 Examples seen. Accuracy:0.9574 Error: 0.12168 Loss:0.13208 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.69s\n", - "1873328 Examples seen. Accuracy:0.9571 Error: 0.12677 Loss:0.12205 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.72s\n", - "1873968 Examples seen. Accuracy:0.9570 Error: 0.21795 Loss:0.28115 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.74s\n", - "1874608 Examples seen. Accuracy:0.9572 Error: 0.07045 Loss:0.04531 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.70s\n", - "1875248 Examples seen. Accuracy:0.9570 Error: 0.14072 Loss:0.14882 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.66s\n", - "1875888 Examples seen. Accuracy:0.9563 Error: 0.01611 Loss:0.00855 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.67s\n", - "1876528 Examples seen. Accuracy:0.9570 Error: 0.16832 Loss:0.25454 Threads: 8 Forward time: 4.99s Backward time: 3.18s Step time: 3.65s\n", - "1877168 Examples seen. 
Accuracy:0.9575 Error: 0.16379 Loss:0.18746 Threads: 8 Forward time: 5.00s Backward time: 3.18s Step time: 3.70s\n", - "1877808 Examples seen. Accuracy:0.9571 Error: 0.13020 Loss:0.19135 Threads: 8 Forward time: 4.97s Backward time: 3.19s Step time: 3.63s\n", - "1878448 Examples seen. Accuracy:0.9567 Error: 0.06094 Loss:0.03615 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.67s\n", - "1879088 Examples seen. Accuracy:0.9560 Error: 0.11889 Loss:0.09509 Threads: 8 Forward time: 5.02s Backward time: 3.20s Step time: 3.69s\n", - "1879728 Examples seen. Accuracy:0.9549 Error: 0.27287 Loss:0.31518 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.67s\n", - "1880368 Examples seen. Accuracy:0.9562 Error: 0.10835 Loss:0.07995 Threads: 8 Forward time: 4.96s Backward time: 3.18s Step time: 3.65s\n", - "1881008 Examples seen. Accuracy:0.9566 Error: 0.09757 Loss:0.09872 Threads: 8 Forward time: 4.98s Backward time: 3.18s Step time: 3.65s\n", - "1881648 Examples seen. Accuracy:0.9569 Error: 0.11494 Loss:0.15117 Threads: 8 Forward time: 5.04s Backward time: 3.20s Step time: 3.68s\n", - "1882288 Examples seen. Accuracy:0.9579 Error: 0.05830 Loss:0.04952 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.71s\n", - "1882928 Examples seen. Accuracy:0.9581 Error: 0.08375 Loss:0.14745 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.65s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1883568 Examples seen. Accuracy:0.9577 Error: 0.09592 Loss:0.11037 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.66s\n", - "1884208 Examples seen. Accuracy:0.9576 Error: 0.16874 Loss:0.19138 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.70s\n", - "1884848 Examples seen. Accuracy:0.9571 Error: 0.06494 Loss:0.04911 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.64s\n", - "1885488 Examples seen. 
Accuracy:0.9567 Error: 0.16075 Loss:0.16678 Threads: 8 Forward time: 5.09s Backward time: 3.27s Step time: 3.69s\n", - "1886128 Examples seen. Accuracy:0.9574 Error: 0.04330 Loss:0.02504 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.75s\n", - "1886768 Examples seen. Accuracy:0.9581 Error: 0.01211 Loss:0.00622 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.67s\n", - "1887408 Examples seen. Accuracy:0.9580 Error: 0.08208 Loss:0.07149 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.65s\n", - "1888048 Examples seen. Accuracy:0.9592 Error: 0.07264 Loss:0.06598 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.65s\n", - "1888688 Examples seen. Accuracy:0.9596 Error: 0.14589 Loss:0.20231 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.67s\n", - "1889328 Examples seen. Accuracy:0.9588 Error: 0.12981 Loss:0.14985 Threads: 8 Forward time: 4.95s Backward time: 3.19s Step time: 3.64s\n", - "1889968 Examples seen. Accuracy:0.9592 Error: 0.10374 Loss:0.14170 Threads: 8 Forward time: 4.93s Backward time: 3.21s Step time: 3.69s\n", - "1890608 Examples seen. Accuracy:0.9592 Error: 0.04690 Loss:0.03925 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.63s\n", - "1891248 Examples seen. Accuracy:0.9580 Error: 0.17447 Loss:0.17345 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.65s\n", - "1891888 Examples seen. Accuracy:0.9584 Error: 0.06886 Loss:0.06462 Threads: 8 Forward time: 5.09s Backward time: 3.23s Step time: 3.67s\n", - "1892528 Examples seen. Accuracy:0.9575 Error: 0.01526 Loss:0.00851 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.69s\n", - "1893168 Examples seen. Accuracy:0.9581 Error: 0.09771 Loss:0.08287 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.63s\n", - "1893808 Examples seen. 
Accuracy:0.9580 Error: 0.07808 Loss:0.10820 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.62s\n", - "1894448 Examples seen. Accuracy:0.9578 Error: 0.15686 Loss:0.15807 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.64s\n", - "1895088 Examples seen. Accuracy:0.9562 Error: 0.21945 Loss:0.23353 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.62s\n", - "1895728 Examples seen. Accuracy:0.9578 Error: 0.07588 Loss:0.07329 Threads: 8 Forward time: 4.91s Backward time: 3.19s Step time: 3.60s\n", - "Starting Validation.\n", - "Epochs: 38 Examples seen:1896352 Validation Accuracy: 0.9830 Validation Error: 0.0451 Validation Loss: 0.0452 Total time: 205.94min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.8 hours.\n", - "Epochs: 38. Working time: 3.43 hours.\n", - "1896992 Examples seen. Accuracy:0.9574 Error: 0.06946 Loss:0.05478 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.64s\n", - "1897632 Examples seen. Accuracy:0.9573 Error: 0.19728 Loss:0.16359 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.60s\n", - "1898272 Examples seen. Accuracy:0.9575 Error: 0.09539 Loss:0.12287 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.63s\n", - "1898912 Examples seen. Accuracy:0.9571 Error: 0.04800 Loss:0.02670 Threads: 8 Forward time: 5.04s Backward time: 3.22s Step time: 3.64s\n", - "1899552 Examples seen. Accuracy:0.9582 Error: 0.08141 Loss:0.05097 Threads: 8 Forward time: 4.93s Backward time: 3.19s Step time: 3.61s\n", - "1900192 Examples seen. Accuracy:0.9575 Error: 0.15912 Loss:0.13423 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.62s\n", - "1900832 Examples seen. Accuracy:0.9576 Error: 0.05816 Loss:0.05678 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.63s\n", - "1901472 Examples seen. Accuracy:0.9561 Error: 0.06190 Loss:0.05640 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.69s\n", - "1902112 Examples seen. 
Accuracy:0.9558 Error: 0.12134 Loss:0.08576 Threads: 8 Forward time: 5.16s Backward time: 3.26s Step time: 3.74s\n", - "1902752 Examples seen. Accuracy:0.9560 Error: 0.07206 Loss:0.04274 Threads: 8 Forward time: 5.10s Backward time: 3.25s Step time: 3.72s\n", - "1903392 Examples seen. Accuracy:0.9547 Error: 0.07398 Loss:0.08184 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.82s\n", - "1904032 Examples seen. Accuracy:0.9545 Error: 0.13930 Loss:0.09375 Threads: 8 Forward time: 5.12s Backward time: 3.27s Step time: 3.84s\n", - "1904672 Examples seen. Accuracy:0.9559 Error: 0.11337 Loss:0.09635 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.71s\n", - "1905312 Examples seen. Accuracy:0.9562 Error: 0.02659 Loss:0.01801 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.69s\n", - "1905952 Examples seen. Accuracy:0.9562 Error: 0.09483 Loss:0.18519 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.62s\n", - "1906592 Examples seen. Accuracy:0.9560 Error: 0.13859 Loss:0.21150 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.65s\n", - "1907232 Examples seen. Accuracy:0.9569 Error: 0.07786 Loss:0.09962 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.63s\n", - "1907872 Examples seen. Accuracy:0.9575 Error: 0.12255 Loss:0.14138 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.63s\n", - "1908512 Examples seen. Accuracy:0.9575 Error: 0.10497 Loss:0.10718 Threads: 8 Forward time: 4.98s Backward time: 3.16s Step time: 3.63s\n", - "1909152 Examples seen. Accuracy:0.9571 Error: 0.07362 Loss:0.05523 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.64s\n", - "1909792 Examples seen. Accuracy:0.9558 Error: 0.23202 Loss:0.31924 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.69s\n", - "1910432 Examples seen. 
Accuracy:0.9543 Error: 0.18658 Loss:0.17026 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.64s\n", - "1911072 Examples seen. Accuracy:0.9530 Error: 0.20928 Loss:0.21472 Threads: 8 Forward time: 5.11s Backward time: 3.19s Step time: 3.73s\n", - "1911712 Examples seen. Accuracy:0.9522 Error: 0.12081 Loss:0.15239 Threads: 8 Forward time: 5.00s Backward time: 3.17s Step time: 3.69s\n", - "1912352 Examples seen. Accuracy:0.9524 Error: 0.07168 Loss:0.04697 Threads: 8 Forward time: 5.00s Backward time: 3.18s Step time: 3.64s\n", - "1912992 Examples seen. Accuracy:0.9532 Error: 0.05348 Loss:0.03226 Threads: 8 Forward time: 5.03s Backward time: 3.20s Step time: 3.65s\n", - "1913632 Examples seen. Accuracy:0.9546 Error: 0.08172 Loss:0.10057 Threads: 8 Forward time: 5.11s Backward time: 3.22s Step time: 3.69s\n", - "1914272 Examples seen. Accuracy:0.9549 Error: 0.10117 Loss:0.10588 Threads: 8 Forward time: 5.13s Backward time: 3.26s Step time: 3.72s\n", - "1914912 Examples seen. Accuracy:0.9567 Error: 0.13670 Loss:0.13147 Threads: 8 Forward time: 4.98s Backward time: 3.19s Step time: 3.67s\n", - "1915552 Examples seen. Accuracy:0.9560 Error: 0.07304 Loss:0.05058 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.69s\n", - "1916192 Examples seen. Accuracy:0.9560 Error: 0.07862 Loss:0.06043 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.67s\n", - "1916832 Examples seen. Accuracy:0.9558 Error: 0.10212 Loss:0.10555 Threads: 8 Forward time: 4.98s Backward time: 3.18s Step time: 3.71s\n", - "1917472 Examples seen. Accuracy:0.9562 Error: 0.06964 Loss:0.05000 Threads: 8 Forward time: 5.03s Backward time: 3.21s Step time: 3.74s\n", - "1918112 Examples seen. Accuracy:0.9572 Error: 0.06620 Loss:0.05128 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.67s\n", - "1918752 Examples seen. 
Accuracy:0.9590 Error: 0.06132 Loss:0.04567 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.66s\n", - "1919392 Examples seen. Accuracy:0.9596 Error: 0.10776 Loss:0.12338 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.67s\n", - "1920032 Examples seen. Accuracy:0.9587 Error: 0.12756 Loss:0.14233 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.65s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1920672 Examples seen. Accuracy:0.9579 Error: 0.16522 Loss:0.20416 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.65s\n", - "1921312 Examples seen. Accuracy:0.9565 Error: 0.13033 Loss:0.15052 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.64s\n", - "1921952 Examples seen. Accuracy:0.9570 Error: 0.07184 Loss:0.04647 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.63s\n", - "1922592 Examples seen. Accuracy:0.9582 Error: 0.08658 Loss:0.15437 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.63s\n", - "1923232 Examples seen. Accuracy:0.9582 Error: 0.11232 Loss:0.12390 Threads: 8 Forward time: 5.02s Backward time: 3.20s Step time: 3.64s\n", - "1923872 Examples seen. Accuracy:0.9580 Error: 0.14932 Loss:0.13402 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.61s\n", - "1924512 Examples seen. Accuracy:0.9577 Error: 0.02818 Loss:0.01480 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.62s\n", - "1925152 Examples seen. Accuracy:0.9575 Error: 0.03215 Loss:0.01707 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.65s\n", - "1925792 Examples seen. Accuracy:0.9569 Error: 0.09871 Loss:0.08763 Threads: 8 Forward time: 5.09s Backward time: 3.22s Step time: 3.63s\n", - "1926432 Examples seen. Accuracy:0.9567 Error: 0.18634 Loss:0.24356 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.65s\n", - "1927072 Examples seen. 
Accuracy:0.9566 Error: 0.13298 Loss:0.11218 Threads: 8 Forward time: 4.98s Backward time: 3.18s Step time: 3.66s\n", - "1927712 Examples seen. Accuracy:0.9557 Error: 0.12890 Loss:0.11758 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "1928352 Examples seen. Accuracy:0.9558 Error: 0.16920 Loss:0.15208 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.64s\n", - "1928992 Examples seen. Accuracy:0.9550 Error: 0.10400 Loss:0.16673 Threads: 8 Forward time: 5.07s Backward time: 3.21s Step time: 3.64s\n", - "1929632 Examples seen. Accuracy:0.9551 Error: 0.16371 Loss:0.15684 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.63s\n", - "1930272 Examples seen. Accuracy:0.9549 Error: 0.13895 Loss:0.11924 Threads: 8 Forward time: 4.98s Backward time: 3.18s Step time: 3.65s\n", - "1930912 Examples seen. Accuracy:0.9553 Error: 0.12780 Loss:0.08297 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.66s\n", - "1931552 Examples seen. Accuracy:0.9566 Error: 0.03757 Loss:0.02385 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.65s\n", - "1932192 Examples seen. Accuracy:0.9576 Error: 0.11039 Loss:0.11438 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.67s\n", - "1932832 Examples seen. Accuracy:0.9572 Error: 0.12712 Loss:0.15825 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.67s\n", - "1933472 Examples seen. Accuracy:0.9571 Error: 0.11167 Loss:0.12649 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.64s\n", - "1934112 Examples seen. Accuracy:0.9571 Error: 0.07529 Loss:0.05379 Threads: 8 Forward time: 4.97s Backward time: 3.19s Step time: 3.63s\n", - "1934752 Examples seen. Accuracy:0.9574 Error: 0.05598 Loss:0.03662 Threads: 8 Forward time: 4.95s Backward time: 3.19s Step time: 3.62s\n", - "1935392 Examples seen. 
Accuracy:0.9576 Error: 0.06798 Loss:0.07322 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.62s\n", - "1936032 Examples seen. Accuracy:0.9570 Error: 0.09908 Loss:0.08005 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.67s\n", - "1936672 Examples seen. Accuracy:0.9558 Error: 0.07398 Loss:0.05030 Threads: 8 Forward time: 4.96s Backward time: 3.18s Step time: 3.65s\n", - "1937312 Examples seen. Accuracy:0.9546 Error: 0.08393 Loss:0.07516 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.63s\n", - "1937952 Examples seen. Accuracy:0.9545 Error: 0.09262 Loss:0.08663 Threads: 8 Forward time: 4.98s Backward time: 3.19s Step time: 3.64s\n", - "1938592 Examples seen. Accuracy:0.9553 Error: 0.09247 Loss:0.08795 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "1939232 Examples seen. Accuracy:0.9556 Error: 0.15727 Loss:0.15402 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.64s\n", - "1939872 Examples seen. Accuracy:0.9557 Error: 0.13277 Loss:0.12644 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.64s\n", - "1940512 Examples seen. Accuracy:0.9551 Error: 0.13032 Loss:0.11599 Threads: 8 Forward time: 4.98s Backward time: 3.19s Step time: 3.62s\n", - "1941152 Examples seen. Accuracy:0.9545 Error: 0.13947 Loss:0.12609 Threads: 8 Forward time: 4.94s Backward time: 3.14s Step time: 3.61s\n", - "1941792 Examples seen. Accuracy:0.9551 Error: 0.10305 Loss:0.07518 Threads: 8 Forward time: 4.90s Backward time: 3.17s Step time: 3.60s\n", - "1942432 Examples seen. Accuracy:0.9545 Error: 0.11349 Loss:0.10990 Threads: 8 Forward time: 5.02s Backward time: 3.18s Step time: 3.69s\n", - "1943072 Examples seen. Accuracy:0.9556 Error: 0.09067 Loss:0.11182 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.64s\n", - "1943712 Examples seen. 
Accuracy:0.9552 Error: 0.10892 Loss:0.09714 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.64s\n", - "1944352 Examples seen. Accuracy:0.9562 Error: 0.07942 Loss:0.06084 Threads: 8 Forward time: 5.08s Backward time: 3.24s Step time: 3.65s\n", - "1944992 Examples seen. Accuracy:0.9557 Error: 0.02260 Loss:0.01171 Threads: 8 Forward time: 5.10s Backward time: 3.25s Step time: 3.68s\n", - "1945632 Examples seen. Accuracy:0.9556 Error: 0.12798 Loss:0.20617 Threads: 8 Forward time: 5.08s Backward time: 3.23s Step time: 3.81s\n", - "Starting Validation.\n", - "Epochs: 39 Examples seen:1946256 Validation Accuracy: 0.9833 Validation Error: 0.0440 Validation Loss: 0.0443 Total time: 211.17min\n", - "Epoch time: 5 minutes. 100 epochs: 8.3 hours.\n", - "Epochs: 39. Working time: 3.52 hours.\n", - "1946896 Examples seen. Accuracy:0.9548 Error: 0.07596 Loss:0.06946 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.78s\n", - "1947536 Examples seen. Accuracy:0.9547 Error: 0.03122 Loss:0.01731 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.70s\n", - "1948176 Examples seen. Accuracy:0.9543 Error: 0.08073 Loss:0.05983 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.66s\n", - "1948816 Examples seen. Accuracy:0.9541 Error: 0.05202 Loss:0.03001 Threads: 8 Forward time: 4.95s Backward time: 3.18s Step time: 3.62s\n", - "1949456 Examples seen. Accuracy:0.9559 Error: 0.01688 Loss:0.00939 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.67s\n", - "1950096 Examples seen. Accuracy:0.9570 Error: 0.12699 Loss:0.11698 Threads: 8 Forward time: 5.05s Backward time: 3.22s Step time: 3.66s\n", - "1950736 Examples seen. Accuracy:0.9560 Error: 0.11172 Loss:0.07707 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.65s\n", - "1951376 Examples seen. Accuracy:0.9562 Error: 0.11830 Loss:0.09813 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.65s\n", - "1952016 Examples seen. 
Accuracy:0.9566 Error: 0.07522 Loss:0.05643 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.65s\n", - "1952656 Examples seen. Accuracy:0.9567 Error: 0.09331 Loss:0.07104 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.69s\n", - "1953296 Examples seen. Accuracy:0.9572 Error: 0.16513 Loss:0.15516 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.71s\n", - "1953936 Examples seen. Accuracy:0.9578 Error: 0.12486 Loss:0.16671 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.70s\n", - "1954576 Examples seen. Accuracy:0.9589 Error: 0.02811 Loss:0.01570 Threads: 8 Forward time: 5.08s Backward time: 3.22s Step time: 3.76s\n", - "1955216 Examples seen. Accuracy:0.9580 Error: 0.08625 Loss:0.06506 Threads: 8 Forward time: 4.94s Backward time: 3.19s Step time: 3.68s\n", - "1955856 Examples seen. Accuracy:0.9587 Error: 0.10249 Loss:0.07206 Threads: 8 Forward time: 4.99s Backward time: 3.19s Step time: 3.68s\n", - "1956496 Examples seen. Accuracy:0.9573 Error: 0.13614 Loss:0.11562 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.73s\n", - "1957136 Examples seen. Accuracy:0.9578 Error: 0.07779 Loss:0.05110 Threads: 8 Forward time: 5.04s Backward time: 3.20s Step time: 3.68s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1957776 Examples seen. Accuracy:0.9568 Error: 0.23783 Loss:0.28378 Threads: 8 Forward time: 4.98s Backward time: 3.19s Step time: 3.68s\n", - "1958416 Examples seen. Accuracy:0.9557 Error: 0.04999 Loss:0.03311 Threads: 8 Forward time: 4.95s Backward time: 3.18s Step time: 3.70s\n", - "1959056 Examples seen. Accuracy:0.9553 Error: 0.14057 Loss:0.14434 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.72s\n", - "1959696 Examples seen. Accuracy:0.9546 Error: 0.11904 Loss:0.12703 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.69s\n", - "1960336 Examples seen. 
Accuracy:0.9534 Error: 0.07844 Loss:0.07202 Threads: 8 Forward time: 5.06s Backward time: 3.26s Step time: 3.71s\n", - "1960976 Examples seen. Accuracy:0.9535 Error: 0.04302 Loss:0.03824 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.75s\n", - "1961616 Examples seen. Accuracy:0.9549 Error: 0.05744 Loss:0.03402 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.71s\n", - "1962256 Examples seen. Accuracy:0.9554 Error: 0.07823 Loss:0.08531 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.70s\n", - "1962896 Examples seen. Accuracy:0.9546 Error: 0.11356 Loss:0.12044 Threads: 8 Forward time: 5.09s Backward time: 3.27s Step time: 3.67s\n", - "1963536 Examples seen. Accuracy:0.9541 Error: 0.12428 Loss:0.11704 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.66s\n", - "1964176 Examples seen. Accuracy:0.9549 Error: 0.07645 Loss:0.05267 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.66s\n", - "1964816 Examples seen. Accuracy:0.9552 Error: 0.15385 Loss:0.17721 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.66s\n", - "1965456 Examples seen. Accuracy:0.9547 Error: 0.18578 Loss:0.31996 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.67s\n", - "1966096 Examples seen. Accuracy:0.9539 Error: 0.07841 Loss:0.06008 Threads: 8 Forward time: 4.94s Backward time: 3.21s Step time: 3.65s\n", - "1966736 Examples seen. Accuracy:0.9542 Error: 0.13138 Loss:0.14388 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.64s\n", - "1967376 Examples seen. Accuracy:0.9552 Error: 0.07811 Loss:0.06564 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.65s\n", - "1968016 Examples seen. Accuracy:0.9547 Error: 0.06264 Loss:0.04557 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.63s\n", - "1968656 Examples seen. 
Accuracy:0.9550 Error: 0.15017 Loss:0.13880 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.63s\n", - "1969296 Examples seen. Accuracy:0.9560 Error: 0.16495 Loss:0.30835 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.66s\n", - "1969936 Examples seen. Accuracy:0.9562 Error: 0.15374 Loss:0.14266 Threads: 8 Forward time: 5.05s Backward time: 3.24s Step time: 3.66s\n", - "1970576 Examples seen. Accuracy:0.9570 Error: 0.16502 Loss:0.24308 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.65s\n", - "1971216 Examples seen. Accuracy:0.9572 Error: 0.12129 Loss:0.13275 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.65s\n", - "1971856 Examples seen. Accuracy:0.9568 Error: 0.11641 Loss:0.14385 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.64s\n", - "1972496 Examples seen. Accuracy:0.9572 Error: 0.11184 Loss:0.08962 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.66s\n", - "1973136 Examples seen. Accuracy:0.9574 Error: 0.13011 Loss:0.12522 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.67s\n", - "1973776 Examples seen. Accuracy:0.9578 Error: 0.13604 Loss:0.12373 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.67s\n", - "1974416 Examples seen. Accuracy:0.9558 Error: 0.20894 Loss:0.21032 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.68s\n", - "1975056 Examples seen. Accuracy:0.9551 Error: 0.08364 Loss:0.06744 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "1975696 Examples seen. Accuracy:0.9564 Error: 0.08421 Loss:0.10634 Threads: 8 Forward time: 5.06s Backward time: 3.26s Step time: 3.71s\n", - "1976336 Examples seen. Accuracy:0.9566 Error: 0.04915 Loss:0.03122 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.67s\n", - "1976976 Examples seen. 
Accuracy:0.9555 Error: 0.08979 Loss:0.21834 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.65s\n", - "1977616 Examples seen. Accuracy:0.9564 Error: 0.08140 Loss:0.08624 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "1978256 Examples seen. Accuracy:0.9575 Error: 0.03626 Loss:0.02001 Threads: 8 Forward time: 5.07s Backward time: 3.27s Step time: 3.68s\n", - "1978896 Examples seen. Accuracy:0.9575 Error: 0.11398 Loss:0.14554 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.64s\n", - "1979536 Examples seen. Accuracy:0.9568 Error: 0.11191 Loss:0.08264 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.67s\n", - "1980176 Examples seen. Accuracy:0.9575 Error: 0.04127 Loss:0.02301 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.67s\n", - "1980816 Examples seen. Accuracy:0.9566 Error: 0.10878 Loss:0.14415 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.67s\n", - "1981456 Examples seen. Accuracy:0.9571 Error: 0.17045 Loss:0.24951 Threads: 8 Forward time: 5.10s Backward time: 3.21s Step time: 3.70s\n", - "1982096 Examples seen. Accuracy:0.9572 Error: 0.11920 Loss:0.13092 Threads: 8 Forward time: 5.10s Backward time: 3.22s Step time: 3.69s\n", - "1982736 Examples seen. Accuracy:0.9563 Error: 0.11615 Loss:0.12482 Threads: 8 Forward time: 5.12s Backward time: 3.28s Step time: 3.70s\n", - "1983376 Examples seen. Accuracy:0.9565 Error: 0.11549 Loss:0.10239 Threads: 8 Forward time: 5.09s Backward time: 3.29s Step time: 3.71s\n", - "1984016 Examples seen. Accuracy:0.9550 Error: 0.16654 Loss:0.23019 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.67s\n", - "1984656 Examples seen. Accuracy:0.9551 Error: 0.08989 Loss:0.05566 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.71s\n", - "1985296 Examples seen. 
Accuracy:0.9553 Error: 0.12084 Loss:0.17785 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.69s\n", - "1985936 Examples seen. Accuracy:0.9548 Error: 0.13153 Loss:0.12217 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.69s\n", - "1986576 Examples seen. Accuracy:0.9553 Error: 0.19813 Loss:0.16785 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.71s\n", - "1987216 Examples seen. Accuracy:0.9565 Error: 0.05785 Loss:0.03622 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.67s\n", - "1987856 Examples seen. Accuracy:0.9557 Error: 0.09124 Loss:0.07761 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.67s\n", - "1988496 Examples seen. Accuracy:0.9537 Error: 0.16184 Loss:0.15157 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.68s\n", - "1989136 Examples seen. Accuracy:0.9538 Error: 0.14168 Loss:0.16176 Threads: 8 Forward time: 5.05s Backward time: 3.22s Step time: 3.69s\n", - "1989776 Examples seen. Accuracy:0.9537 Error: 0.09993 Loss:0.08669 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.67s\n", - "1990416 Examples seen. Accuracy:0.9548 Error: 0.13575 Loss:0.14654 Threads: 8 Forward time: 5.04s Backward time: 3.21s Step time: 3.67s\n", - "1991056 Examples seen. Accuracy:0.9562 Error: 0.12144 Loss:0.12310 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.67s\n", - "1991696 Examples seen. Accuracy:0.9563 Error: 0.06950 Loss:0.10158 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.71s\n", - "1992336 Examples seen. Accuracy:0.9563 Error: 0.11919 Loss:0.11302 Threads: 8 Forward time: 5.06s Backward time: 3.23s Step time: 3.74s\n", - "1992976 Examples seen. Accuracy:0.9565 Error: 0.10072 Loss:0.09323 Threads: 8 Forward time: 5.07s Backward time: 3.23s Step time: 3.74s\n", - "1993616 Examples seen. 
Accuracy:0.9569 Error: 0.05778 Loss:0.03738 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.68s\n", - "1994256 Examples seen. Accuracy:0.9577 Error: 0.11338 Loss:0.17860 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.66s\n", - "1994896 Examples seen. Accuracy:0.9570 Error: 0.09030 Loss:0.06168 Threads: 8 Forward time: 5.06s Backward time: 3.22s Step time: 3.67s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "1995536 Examples seen. Accuracy:0.9584 Error: 0.15258 Loss:0.10264 Threads: 8 Forward time: 5.01s Backward time: 3.18s Step time: 3.66s\n", - "Starting Validation.\n", - "Epochs: 40 Examples seen:1996160 Validation Accuracy: 0.9833 Validation Error: 0.0438 Validation Loss: 0.0430 Total time: 216.43min\n", - "Layer 0 Max Output: 1.266 Min Output: -2.000 TNNetInput 128,128,3 Times: 0.00s 0.00s\n", - "Layer 1 Neurons: 64 Max Weight: 0.454 Min Weight: -0.354 Max Output: 6.081 Min Output: -5.892 TNNetConvolutionLinear 66,66,64 Times: 8.56s 0.40s Parent:0\n", - "Layer 2 Max Output: 6.081 Min Output: -3.439 TNNetMaxPool 33,33,64 Times: 3.62s 0.06s Parent:1\n", - "Layer 3 Neurons: 1 Max Weight: 0.640 Min Weight: 0.237 Max Output: 9.128 Min Output: -5.741 TNNetMovingStdNormalization 33,33,64 Times: 0.30s 0.00s Parent:2\n", - "Layer 4 Neurons: 64 Max Weight: 0.415 Min Weight: -0.223 Max Output: 11.363 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.83s 0.51s Parent:3\n", - "Layer 5 Neurons: 64 Max Weight: 0.385 Min Weight: -0.340 Max Output: 11.486 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.85s 0.18s Parent:4\n", - "Layer 6 Max Output: 11.486 Min Output: 0.000 TNNetMaxPool 17,17,64 Times: 0.50s 0.02s Parent:5\n", - "Layer 7 Neurons: 64 Max Weight: 0.435 Min Weight: -0.271 Max Output: 8.026 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.42s 0.09s Parent:6\n", - "Layer 8 Neurons: 64 Max Weight: 0.271 Min Weight: -0.236 Max Output: 6.715 Min Output: 0.000 TNNetConvolutionReLU 
17,17,64 Times: 1.49s 0.09s Parent:7\n", - "Layer 9 Neurons: 64 Max Weight: 0.244 Min Weight: -0.216 Max Output: 12.481 Min Output: 0.000 TNNetConvolutionReLU 9,9,64 Times: 0.44s 0.02s Parent:8\n", - "Layer 10 Max Output: 12.481 Min Output: 0.000 TNNetDropout 9,9,64 Times: 0.01s 0.00s Parent:9\n", - "Layer 11 Max Output: 12.481 Min Output: 0.000 TNNetMaxPool 5,5,64 Times: 0.04s 0.00s Parent:10\n", - "Layer 12 Neurons: 39 Max Weight: 0.387 Min Weight: -0.402 Max Output: 36.226 Min Output: -16.666 TNNetFullConnectLinear 39,1,1 Times: 0.03s 0.00s Parent:11\n", - "Layer 13 Max Output: 1.000 Min Output: 0.000 TNNetSoftMax 39,1,1 Times: 0.00s 0.00s Parent:12\n", - "Starting Testing.\n", - "Epochs: 40 Examples seen:1996160 Test Accuracy: 0.9877 Test Error: 0.0364 Test Loss: 0.0344 Total time: 216.91min\n", - "Epoch time: 4.8 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 40. Working time: 3.62 hours.\n", - "Learning rate set to:0.00067\n", - "1996800 Examples seen. Accuracy:0.9603 Error: 0.01858 Loss:0.01055 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.79s\n", - "1997440 Examples seen. Accuracy:0.9612 Error: 0.06751 Loss:0.04420 Threads: 8 Forward time: 5.05s Backward time: 3.20s Step time: 3.70s\n", - "1998080 Examples seen. Accuracy:0.9634 Error: 0.05969 Loss:0.03811 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.66s\n", - "1998720 Examples seen. Accuracy:0.9636 Error: 0.06211 Loss:0.05980 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.66s\n", - "1999360 Examples seen. Accuracy:0.9627 Error: 0.19253 Loss:0.23533 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.79s\n", - "2000000 Examples seen. Accuracy:0.9621 Error: 0.05609 Loss:0.08227 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.69s\n", - "2000640 Examples seen. Accuracy:0.9602 Error: 0.09609 Loss:0.25120 Threads: 8 Forward time: 5.03s Backward time: 3.21s Step time: 3.69s\n", - "2001280 Examples seen. 
Accuracy:0.9606 Error: 0.10851 Loss:0.09629 Threads: 8 Forward time: 5.05s Backward time: 3.26s Step time: 3.86s\n", - "2001920 Examples seen. Accuracy:0.9596 Error: 0.09719 Loss:0.07179 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.70s\n", - "2002560 Examples seen. Accuracy:0.9606 Error: 0.17319 Loss:0.14447 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.72s\n", - "2003200 Examples seen. Accuracy:0.9601 Error: 0.07813 Loss:0.05612 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.69s\n", - "2003840 Examples seen. Accuracy:0.9610 Error: 0.06347 Loss:0.04018 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.69s\n", - "2004480 Examples seen. Accuracy:0.9611 Error: 0.16046 Loss:0.12981 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.74s\n", - "2005120 Examples seen. Accuracy:0.9615 Error: 0.08533 Loss:0.06469 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.74s\n", - "2005760 Examples seen. Accuracy:0.9624 Error: 0.12167 Loss:0.13669 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.68s\n", - "2006400 Examples seen. Accuracy:0.9635 Error: 0.07194 Loss:0.04741 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.73s\n", - "2007040 Examples seen. Accuracy:0.9637 Error: 0.11450 Loss:0.10282 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.74s\n", - "2007680 Examples seen. Accuracy:0.9641 Error: 0.09521 Loss:0.14021 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.65s\n", - "2008320 Examples seen. Accuracy:0.9639 Error: 0.05709 Loss:0.04800 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.63s\n", - "2008960 Examples seen. Accuracy:0.9639 Error: 0.08175 Loss:0.06516 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.65s\n", - "2009600 Examples seen. 
Accuracy:0.9637 Error: 0.15362 Loss:0.18719 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.68s\n", - "2010240 Examples seen. Accuracy:0.9632 Error: 0.11108 Loss:0.08708 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.72s\n", - "2010880 Examples seen. Accuracy:0.9632 Error: 0.04366 Loss:0.06930 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.65s\n", - "2011520 Examples seen. Accuracy:0.9615 Error: 0.11517 Loss:0.13926 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.66s\n", - "2012160 Examples seen. Accuracy:0.9600 Error: 0.09046 Loss:0.10264 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.62s\n", - "2012800 Examples seen. Accuracy:0.9592 Error: 0.16845 Loss:0.19150 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.62s\n", - "2013440 Examples seen. Accuracy:0.9588 Error: 0.04481 Loss:0.03457 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.62s\n", - "2014080 Examples seen. Accuracy:0.9599 Error: 0.13315 Loss:0.09594 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.62s\n", - "2014720 Examples seen. Accuracy:0.9586 Error: 0.13077 Loss:0.10909 Threads: 8 Forward time: 5.07s Backward time: 3.26s Step time: 3.64s\n", - "2015360 Examples seen. Accuracy:0.9598 Error: 0.09163 Loss:0.07449 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.75s\n", - "2016000 Examples seen. Accuracy:0.9592 Error: 0.07351 Loss:0.06709 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.73s\n", - "2016640 Examples seen. Accuracy:0.9590 Error: 0.10505 Loss:0.08804 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.70s\n", - "2017280 Examples seen. Accuracy:0.9602 Error: 0.06601 Loss:0.04553 Threads: 8 Forward time: 5.23s Backward time: 3.37s Step time: 3.85s\n", - "2017920 Examples seen. 
Accuracy:0.9598 Error: 0.12744 Loss:0.14823 Threads: 8 Forward time: 5.10s Backward time: 3.23s Step time: 3.78s\n", - "2018560 Examples seen. Accuracy:0.9596 Error: 0.16724 Loss:0.20206 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.82s\n", - "2019200 Examples seen. Accuracy:0.9599 Error: 0.06914 Loss:0.07514 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.65s\n", - "2019840 Examples seen. Accuracy:0.9611 Error: 0.14347 Loss:0.16205 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.67s\n", - "2020480 Examples seen. Accuracy:0.9614 Error: 0.07310 Loss:0.06299 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.66s\n", - "2021120 Examples seen. Accuracy:0.9615 Error: 0.12056 Loss:0.09817 Threads: 8 Forward time: 5.04s Backward time: 3.22s Step time: 3.69s\n", - "2021760 Examples seen. Accuracy:0.9614 Error: 0.04295 Loss:0.02542 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.68s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2022400 Examples seen. Accuracy:0.9617 Error: 0.08384 Loss:0.15203 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.65s\n", - "2023040 Examples seen. Accuracy:0.9616 Error: 0.14660 Loss:0.15672 Threads: 8 Forward time: 4.97s Backward time: 3.19s Step time: 3.67s\n", - "2023680 Examples seen. Accuracy:0.9625 Error: 0.10610 Loss:0.15967 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.66s\n", - "2024320 Examples seen. Accuracy:0.9620 Error: 0.10287 Loss:0.11911 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.67s\n", - "2024960 Examples seen. Accuracy:0.9626 Error: 0.04699 Loss:0.02923 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.69s\n", - "2025600 Examples seen. Accuracy:0.9624 Error: 0.10360 Loss:0.11218 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.70s\n", - "2026240 Examples seen. 
Accuracy:0.9625 Error: 0.07975 Loss:0.05880 Threads: 8 Forward time: 4.99s Backward time: 3.19s Step time: 3.68s\n", - "2026880 Examples seen. Accuracy:0.9618 Error: 0.06190 Loss:0.03965 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.69s\n", - "2027520 Examples seen. Accuracy:0.9614 Error: 0.08094 Loss:0.05676 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.66s\n", - "2028160 Examples seen. Accuracy:0.9609 Error: 0.11550 Loss:0.11287 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.64s\n", - "2028800 Examples seen. Accuracy:0.9601 Error: 0.15215 Loss:0.22587 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.65s\n", - "2029440 Examples seen. Accuracy:0.9597 Error: 0.14517 Loss:0.12190 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.68s\n", - "2030080 Examples seen. Accuracy:0.9593 Error: 0.15409 Loss:0.10977 Threads: 8 Forward time: 4.95s Backward time: 3.16s Step time: 3.69s\n", - "2030720 Examples seen. Accuracy:0.9590 Error: 0.09567 Loss:0.11963 Threads: 8 Forward time: 4.96s Backward time: 3.17s Step time: 3.63s\n", - "2031360 Examples seen. Accuracy:0.9598 Error: 0.11987 Loss:0.08958 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.67s\n", - "2032000 Examples seen. Accuracy:0.9595 Error: 0.14072 Loss:0.22758 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.64s\n", - "2032640 Examples seen. Accuracy:0.9597 Error: 0.09160 Loss:0.07322 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.65s\n", - "2033280 Examples seen. Accuracy:0.9592 Error: 0.11376 Loss:0.09755 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.64s\n", - "2033920 Examples seen. Accuracy:0.9605 Error: 0.10646 Loss:0.16786 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.65s\n", - "2034560 Examples seen. 
Accuracy:0.9605 Error: 0.19598 Loss:0.17189 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.69s\n", - "2035200 Examples seen. Accuracy:0.9597 Error: 0.12555 Loss:0.14887 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.65s\n", - "2035840 Examples seen. Accuracy:0.9600 Error: 0.09475 Loss:0.07416 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.68s\n", - "2036480 Examples seen. Accuracy:0.9592 Error: 0.10563 Loss:0.10907 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.65s\n", - "2037120 Examples seen. Accuracy:0.9594 Error: 0.08218 Loss:0.05294 Threads: 8 Forward time: 5.06s Backward time: 3.21s Step time: 3.65s\n", - "2037760 Examples seen. Accuracy:0.9606 Error: 0.10311 Loss:0.11156 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.65s\n", - "2038400 Examples seen. Accuracy:0.9602 Error: 0.12782 Loss:0.10691 Threads: 8 Forward time: 4.93s Backward time: 3.19s Step time: 3.64s\n", - "2039040 Examples seen. Accuracy:0.9596 Error: 0.06344 Loss:0.04305 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.64s\n", - "2039680 Examples seen. Accuracy:0.9605 Error: 0.06420 Loss:0.03667 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.68s\n", - "2040320 Examples seen. Accuracy:0.9619 Error: 0.08777 Loss:0.07223 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.65s\n", - "2040960 Examples seen. Accuracy:0.9613 Error: 0.08049 Loss:0.08330 Threads: 8 Forward time: 5.05s Backward time: 3.27s Step time: 3.70s\n", - "2041600 Examples seen. Accuracy:0.9617 Error: 0.03486 Loss:0.02029 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.67s\n", - "2042240 Examples seen. Accuracy:0.9610 Error: 0.16104 Loss:0.19287 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.64s\n", - "2042880 Examples seen. 
Accuracy:0.9602 Error: 0.06730 Loss:0.05337 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.65s\n", - "2043520 Examples seen. Accuracy:0.9598 Error: 0.16222 Loss:0.17415 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.65s\n", - "2044160 Examples seen. Accuracy:0.9611 Error: 0.12666 Loss:0.16967 Threads: 8 Forward time: 4.95s Backward time: 3.25s Step time: 3.68s\n", - "2044800 Examples seen. Accuracy:0.9598 Error: 0.16521 Loss:0.15240 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.69s\n", - "2045440 Examples seen. Accuracy:0.9608 Error: 0.13204 Loss:0.15356 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.71s\n", - "Starting Validation.\n", - "Epochs: 41 Examples seen:2046064 Validation Accuracy: 0.9841 Validation Error: 0.0429 Validation Loss: 0.0413 Total time: 222.17min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 41. Working time: 3.7 hours.\n", - "2046704 Examples seen. Accuracy:0.9610 Error: 0.06388 Loss:0.04359 Threads: 8 Forward time: 5.06s Backward time: 3.28s Step time: 4.05s\n", - "2047344 Examples seen. Accuracy:0.9617 Error: 0.06283 Loss:0.05038 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.74s\n", - "2047984 Examples seen. Accuracy:0.9607 Error: 0.09704 Loss:0.09312 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.64s\n", - "2048624 Examples seen. Accuracy:0.9609 Error: 0.10587 Loss:0.07870 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.66s\n", - "2049264 Examples seen. Accuracy:0.9606 Error: 0.07583 Loss:0.06637 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.65s\n", - "2049904 Examples seen. Accuracy:0.9603 Error: 0.05833 Loss:0.04777 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.65s\n", - "2050544 Examples seen. Accuracy:0.9614 Error: 0.09262 Loss:0.07276 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.62s\n", - "2051184 Examples seen. 
Accuracy:0.9608 Error: 0.14598 Loss:0.14171 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "2051824 Examples seen. Accuracy:0.9611 Error: 0.07429 Loss:0.07359 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.64s\n", - "2052464 Examples seen. Accuracy:0.9614 Error: 0.05238 Loss:0.03611 Threads: 8 Forward time: 5.12s Backward time: 3.33s Step time: 3.67s\n", - "2053104 Examples seen. Accuracy:0.9606 Error: 0.12459 Loss:0.12483 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.68s\n", - "2053744 Examples seen. Accuracy:0.9610 Error: 0.06927 Loss:0.05665 Threads: 8 Forward time: 5.09s Backward time: 3.27s Step time: 3.70s\n", - "2054384 Examples seen. Accuracy:0.9605 Error: 0.16545 Loss:0.15736 Threads: 8 Forward time: 5.35s Backward time: 3.35s Step time: 4.03s\n", - "2055024 Examples seen. Accuracy:0.9603 Error: 0.08296 Loss:0.06288 Threads: 8 Forward time: 5.08s Backward time: 3.22s Step time: 3.80s\n", - "2055664 Examples seen. Accuracy:0.9608 Error: 0.12343 Loss:0.10977 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.73s\n", - "2056304 Examples seen. Accuracy:0.9615 Error: 0.05542 Loss:0.03797 Threads: 8 Forward time: 5.24s Backward time: 3.30s Step time: 3.74s\n", - "2056944 Examples seen. Accuracy:0.9621 Error: 0.12744 Loss:0.20415 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.76s\n", - "2057584 Examples seen. Accuracy:0.9617 Error: 0.13120 Loss:0.19197 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.72s\n", - "2058224 Examples seen. Accuracy:0.9623 Error: 0.08980 Loss:0.06216 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.75s\n", - "2058864 Examples seen. Accuracy:0.9612 Error: 0.06868 Loss:0.05070 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.76s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2059504 Examples seen. 
Accuracy:0.9610 Error: 0.01298 Loss:0.00672 Threads: 8 Forward time: 5.04s Backward time: 3.27s Step time: 3.79s\n", - "2060144 Examples seen. Accuracy:0.9611 Error: 0.07902 Loss:0.05557 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.77s\n", - "2060784 Examples seen. Accuracy:0.9606 Error: 0.12193 Loss:0.10071 Threads: 8 Forward time: 5.18s Backward time: 3.34s Step time: 3.77s\n", - "2061424 Examples seen. Accuracy:0.9592 Error: 0.09883 Loss:0.13209 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.71s\n", - "2062064 Examples seen. Accuracy:0.9602 Error: 0.07230 Loss:0.04877 Threads: 8 Forward time: 5.06s Backward time: 3.24s Step time: 3.70s\n", - "2062704 Examples seen. Accuracy:0.9609 Error: 0.12590 Loss:0.18081 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.75s\n", - "2063344 Examples seen. Accuracy:0.9612 Error: 0.11088 Loss:0.17661 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.70s\n", - "2063984 Examples seen. Accuracy:0.9609 Error: 0.04687 Loss:0.02792 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.67s\n", - "2064624 Examples seen. Accuracy:0.9607 Error: 0.11287 Loss:0.13627 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.74s\n", - "2065264 Examples seen. Accuracy:0.9607 Error: 0.03670 Loss:0.02258 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.67s\n", - "2065904 Examples seen. Accuracy:0.9599 Error: 0.13738 Loss:0.25287 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.64s\n", - "2066544 Examples seen. Accuracy:0.9607 Error: 0.16876 Loss:0.19209 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.64s\n", - "2067184 Examples seen. Accuracy:0.9597 Error: 0.13200 Loss:0.13405 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.69s\n", - "2067824 Examples seen. 
Accuracy:0.9604 Error: 0.09032 Loss:0.07258 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.67s\n", - "2068464 Examples seen. Accuracy:0.9610 Error: 0.08576 Loss:0.06849 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.67s\n", - "2069104 Examples seen. Accuracy:0.9608 Error: 0.17692 Loss:0.14961 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.64s\n", - "2069744 Examples seen. Accuracy:0.9602 Error: 0.12313 Loss:0.10311 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.70s\n", - "2070384 Examples seen. Accuracy:0.9601 Error: 0.11695 Loss:0.15260 Threads: 8 Forward time: 5.06s Backward time: 3.23s Step time: 3.70s\n", - "2071024 Examples seen. Accuracy:0.9614 Error: 0.14680 Loss:0.10326 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.65s\n", - "2071664 Examples seen. Accuracy:0.9619 Error: 0.07119 Loss:0.04497 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.64s\n", - "2072304 Examples seen. Accuracy:0.9616 Error: 0.08803 Loss:0.05972 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.63s\n", - "2072944 Examples seen. Accuracy:0.9613 Error: 0.09678 Loss:0.17543 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.66s\n", - "2073584 Examples seen. Accuracy:0.9613 Error: 0.07737 Loss:0.05816 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.70s\n", - "2074224 Examples seen. Accuracy:0.9614 Error: 0.12186 Loss:0.10699 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.70s\n", - "2074864 Examples seen. Accuracy:0.9619 Error: 0.16597 Loss:0.18705 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.67s\n", - "2075504 Examples seen. Accuracy:0.9621 Error: 0.15845 Loss:0.13995 Threads: 8 Forward time: 5.02s Backward time: 3.20s Step time: 3.63s\n", - "2076144 Examples seen. 
Accuracy:0.9610 Error: 0.04733 Loss:0.05108 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.67s\n", - "2076784 Examples seen. Accuracy:0.9599 Error: 0.14905 Loss:0.18632 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.64s\n", - "2077424 Examples seen. Accuracy:0.9604 Error: 0.14534 Loss:0.15695 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.63s\n", - "2078064 Examples seen. Accuracy:0.9620 Error: 0.06016 Loss:0.05733 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.64s\n", - "2078704 Examples seen. Accuracy:0.9624 Error: 0.08540 Loss:0.05417 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.64s\n", - "2079344 Examples seen. Accuracy:0.9617 Error: 0.18419 Loss:0.21919 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.65s\n", - "2079984 Examples seen. Accuracy:0.9606 Error: 0.11250 Loss:0.15092 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.65s\n", - "2080624 Examples seen. Accuracy:0.9590 Error: 0.09583 Loss:0.08303 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.65s\n", - "2081264 Examples seen. Accuracy:0.9598 Error: 0.07459 Loss:0.04854 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.69s\n", - "2081904 Examples seen. Accuracy:0.9592 Error: 0.09017 Loss:0.09750 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.64s\n", - "2082544 Examples seen. Accuracy:0.9598 Error: 0.04355 Loss:0.02540 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.63s\n", - "2083184 Examples seen. Accuracy:0.9609 Error: 0.05814 Loss:0.04025 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.63s\n", - "2083824 Examples seen. Accuracy:0.9606 Error: 0.03511 Loss:0.02003 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.68s\n", - "2084464 Examples seen. 
Accuracy:0.9595 Error: 0.09694 Loss:0.06281 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.66s\n", - "2085104 Examples seen. Accuracy:0.9592 Error: 0.17229 Loss:0.23004 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.66s\n", - "2085744 Examples seen. Accuracy:0.9602 Error: 0.11241 Loss:0.11904 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.64s\n", - "2086384 Examples seen. Accuracy:0.9602 Error: 0.05241 Loss:0.03136 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.65s\n", - "2087024 Examples seen. Accuracy:0.9607 Error: 0.09882 Loss:0.08587 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.65s\n", - "2087664 Examples seen. Accuracy:0.9599 Error: 0.04972 Loss:0.03560 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.66s\n", - "2088304 Examples seen. Accuracy:0.9598 Error: 0.12225 Loss:0.10739 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.71s\n", - "2088944 Examples seen. Accuracy:0.9598 Error: 0.09113 Loss:0.07337 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.69s\n", - "2089584 Examples seen. Accuracy:0.9601 Error: 0.12270 Loss:0.10179 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.70s\n", - "2090224 Examples seen. Accuracy:0.9600 Error: 0.05132 Loss:0.03167 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.71s\n", - "2090864 Examples seen. Accuracy:0.9591 Error: 0.08323 Loss:0.06303 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.73s\n", - "2091504 Examples seen. Accuracy:0.9585 Error: 0.10813 Loss:0.13794 Threads: 8 Forward time: 4.97s Backward time: 3.19s Step time: 3.68s\n", - "2092144 Examples seen. Accuracy:0.9590 Error: 0.09136 Loss:0.09140 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.67s\n", - "2092784 Examples seen. 
Accuracy:0.9586 Error: 0.09629 Loss:0.11328 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.69s\n", - "2093424 Examples seen. Accuracy:0.9581 Error: 0.12296 Loss:0.15214 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.69s\n", - "2094064 Examples seen. Accuracy:0.9571 Error: 0.12320 Loss:0.11565 Threads: 8 Forward time: 5.00s Backward time: 3.28s Step time: 3.71s\n", - "2094704 Examples seen. Accuracy:0.9574 Error: 0.10587 Loss:0.12594 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.72s\n", - "2095344 Examples seen. Accuracy:0.9571 Error: 0.10469 Loss:0.08153 Threads: 8 Forward time: 5.01s Backward time: 3.27s Step time: 3.69s\n", - "Starting Validation.\n", - "Epochs: 42 Examples seen:2095968 Validation Accuracy: 0.9837 Validation Error: 0.0430 Validation Loss: 0.0415 Total time: 227.45min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 42. Working time: 3.79 hours.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2096608 Examples seen. Accuracy:0.9573 Error: 0.11755 Loss:0.12870 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.74s\n", - "2097248 Examples seen. Accuracy:0.9575 Error: 0.11296 Loss:0.11223 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.67s\n", - "2097888 Examples seen. Accuracy:0.9582 Error: 0.07669 Loss:0.06412 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.66s\n", - "2098528 Examples seen. Accuracy:0.9583 Error: 0.09211 Loss:0.09909 Threads: 8 Forward time: 5.04s Backward time: 3.20s Step time: 3.66s\n", - "2099168 Examples seen. Accuracy:0.9576 Error: 0.06681 Loss:0.04023 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.65s\n", - "2099808 Examples seen. Accuracy:0.9586 Error: 0.05375 Loss:0.03276 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.69s\n", - "2100448 Examples seen. 
Accuracy:0.9588 Error: 0.14109 Loss:0.33881 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.61s\n", - "2101088 Examples seen. Accuracy:0.9596 Error: 0.15976 Loss:0.16914 Threads: 8 Forward time: 5.04s Backward time: 3.27s Step time: 3.65s\n", - "2101728 Examples seen. Accuracy:0.9611 Error: 0.08281 Loss:0.06811 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.64s\n", - "2102368 Examples seen. Accuracy:0.9614 Error: 0.10608 Loss:0.10758 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.65s\n", - "2103008 Examples seen. Accuracy:0.9616 Error: 0.05203 Loss:0.04243 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.63s\n", - "2103648 Examples seen. Accuracy:0.9609 Error: 0.14012 Loss:0.13503 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "2104288 Examples seen. Accuracy:0.9618 Error: 0.07116 Loss:0.06829 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.64s\n", - "2104928 Examples seen. Accuracy:0.9614 Error: 0.10297 Loss:0.07590 Threads: 8 Forward time: 4.89s Backward time: 3.25s Step time: 3.62s\n", - "2105568 Examples seen. Accuracy:0.9609 Error: 0.02785 Loss:0.01627 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.64s\n", - "2106208 Examples seen. Accuracy:0.9605 Error: 0.18635 Loss:0.20010 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.61s\n", - "2106848 Examples seen. Accuracy:0.9605 Error: 0.07878 Loss:0.07617 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.62s\n", - "2107488 Examples seen. Accuracy:0.9617 Error: 0.04060 Loss:0.03705 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.62s\n", - "2108128 Examples seen. Accuracy:0.9624 Error: 0.06737 Loss:0.11918 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.61s\n", - "2108768 Examples seen. 
Accuracy:0.9627 Error: 0.09881 Loss:0.06549 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.61s\n", - "2109408 Examples seen. Accuracy:0.9617 Error: 0.08038 Loss:0.06482 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.62s\n", - "2110048 Examples seen. Accuracy:0.9622 Error: 0.04157 Loss:0.02704 Threads: 8 Forward time: 5.08s Backward time: 3.26s Step time: 3.63s\n", - "2110688 Examples seen. Accuracy:0.9615 Error: 0.05347 Loss:0.03057 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.66s\n", - "2111328 Examples seen. Accuracy:0.9615 Error: 0.03442 Loss:0.07377 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.63s\n", - "2111968 Examples seen. Accuracy:0.9606 Error: 0.09026 Loss:0.06736 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.63s\n", - "2112608 Examples seen. Accuracy:0.9612 Error: 0.09052 Loss:0.21152 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.62s\n", - "2113248 Examples seen. Accuracy:0.9609 Error: 0.18786 Loss:0.20536 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.66s\n", - "2113888 Examples seen. Accuracy:0.9603 Error: 0.13745 Loss:0.17095 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.64s\n", - "2114528 Examples seen. Accuracy:0.9607 Error: 0.15790 Loss:0.20883 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.66s\n", - "2115168 Examples seen. Accuracy:0.9595 Error: 0.14595 Loss:0.11775 Threads: 8 Forward time: 4.93s Backward time: 3.19s Step time: 3.60s\n", - "2115808 Examples seen. Accuracy:0.9602 Error: 0.07886 Loss:0.07036 Threads: 8 Forward time: 5.02s Backward time: 3.28s Step time: 3.67s\n", - "2116448 Examples seen. Accuracy:0.9589 Error: 0.13103 Loss:0.11583 Threads: 8 Forward time: 5.15s Backward time: 3.29s Step time: 3.75s\n", - "2117088 Examples seen. 
Accuracy:0.9574 Error: 0.17012 Loss:0.18372 Threads: 8 Forward time: 5.05s Backward time: 3.24s Step time: 3.73s\n", - "2117728 Examples seen. Accuracy:0.9573 Error: 0.09321 Loss:0.17361 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.62s\n", - "2118368 Examples seen. Accuracy:0.9583 Error: 0.12010 Loss:0.10161 Threads: 8 Forward time: 4.93s Backward time: 3.24s Step time: 3.59s\n", - "2119008 Examples seen. Accuracy:0.9581 Error: 0.13168 Loss:0.27239 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.61s\n", - "2119648 Examples seen. Accuracy:0.9594 Error: 0.05126 Loss:0.02962 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.61s\n", - "2120288 Examples seen. Accuracy:0.9589 Error: 0.12949 Loss:0.14341 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.62s\n", - "2120928 Examples seen. Accuracy:0.9587 Error: 0.16659 Loss:0.12274 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.64s\n", - "2121568 Examples seen. Accuracy:0.9588 Error: 0.16076 Loss:0.22513 Threads: 8 Forward time: 5.01s Backward time: 3.19s Step time: 3.60s\n", - "2122208 Examples seen. Accuracy:0.9597 Error: 0.06150 Loss:0.03584 Threads: 8 Forward time: 5.02s Backward time: 3.20s Step time: 3.62s\n", - "2122848 Examples seen. Accuracy:0.9590 Error: 0.11596 Loss:0.16469 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.65s\n", - "2123488 Examples seen. Accuracy:0.9592 Error: 0.08563 Loss:0.11143 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.63s\n", - "2124128 Examples seen. Accuracy:0.9602 Error: 0.09218 Loss:0.07035 Threads: 8 Forward time: 4.93s Backward time: 3.20s Step time: 3.61s\n", - "2124768 Examples seen. Accuracy:0.9593 Error: 0.06447 Loss:0.04488 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.68s\n", - "2125408 Examples seen. 
Accuracy:0.9598 Error: 0.05466 Loss:0.10201 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.67s\n", - "2126048 Examples seen. Accuracy:0.9608 Error: 0.12386 Loss:0.17441 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.63s\n", - "2126688 Examples seen. Accuracy:0.9615 Error: 0.11161 Loss:0.11010 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.61s\n", - "2127328 Examples seen. Accuracy:0.9623 Error: 0.03152 Loss:0.01665 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.64s\n", - "2127968 Examples seen. Accuracy:0.9629 Error: 0.09988 Loss:0.06874 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.64s\n", - "2128608 Examples seen. Accuracy:0.9612 Error: 0.09593 Loss:0.06687 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.62s\n", - "2129248 Examples seen. Accuracy:0.9611 Error: 0.09015 Loss:0.08932 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.62s\n", - "2129888 Examples seen. Accuracy:0.9605 Error: 0.10368 Loss:0.11767 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.64s\n", - "2130528 Examples seen. Accuracy:0.9597 Error: 0.14479 Loss:0.23093 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.62s\n", - "2131168 Examples seen. Accuracy:0.9611 Error: 0.08661 Loss:0.10253 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.61s\n", - "2131808 Examples seen. Accuracy:0.9609 Error: 0.08236 Loss:0.12978 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.64s\n", - "2132448 Examples seen. Accuracy:0.9610 Error: 0.15831 Loss:0.14996 Threads: 8 Forward time: 4.92s Backward time: 3.20s Step time: 3.62s\n", - "2133088 Examples seen. Accuracy:0.9612 Error: 0.08443 Loss:0.07349 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.61s\n", - "2133728 Examples seen. 
Accuracy:0.9619 Error: 0.06208 Loss:0.04203 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.64s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2134368 Examples seen. Accuracy:0.9627 Error: 0.13095 Loss:0.13919 Threads: 8 Forward time: 5.35s Backward time: 3.40s Step time: 3.78s\n", - "2135008 Examples seen. Accuracy:0.9622 Error: 0.12744 Loss:0.11882 Threads: 8 Forward time: 5.07s Backward time: 3.25s Step time: 3.96s\n", - "2135648 Examples seen. Accuracy:0.9629 Error: 0.13301 Loss:0.19207 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.67s\n", - "2136288 Examples seen. Accuracy:0.9637 Error: 0.06881 Loss:0.08541 Threads: 8 Forward time: 5.14s Backward time: 3.27s Step time: 3.68s\n", - "2136928 Examples seen. Accuracy:0.9635 Error: 0.09436 Loss:0.08244 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.66s\n", - "2137568 Examples seen. Accuracy:0.9622 Error: 0.12418 Loss:0.11643 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.65s\n", - "2138208 Examples seen. Accuracy:0.9605 Error: 0.17934 Loss:0.17309 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.63s\n", - "2138848 Examples seen. Accuracy:0.9615 Error: 0.12980 Loss:0.12131 Threads: 8 Forward time: 5.05s Backward time: 3.28s Step time: 3.65s\n", - "2139488 Examples seen. Accuracy:0.9608 Error: 0.09891 Loss:0.08197 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.72s\n", - "2140128 Examples seen. Accuracy:0.9602 Error: 0.13142 Loss:0.13997 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.71s\n", - "2140768 Examples seen. Accuracy:0.9604 Error: 0.18337 Loss:0.22748 Threads: 8 Forward time: 5.08s Backward time: 3.24s Step time: 3.73s\n", - "2141408 Examples seen. Accuracy:0.9607 Error: 0.11484 Loss:0.10168 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.77s\n", - "2142048 Examples seen. 
Accuracy:0.9602 Error: 0.05541 Loss:0.06526 Threads: 8 Forward time: 5.06s Backward time: 3.29s Step time: 3.75s\n", - "2142688 Examples seen. Accuracy:0.9616 Error: 0.02845 Loss:0.01583 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.73s\n", - "2143328 Examples seen. Accuracy:0.9608 Error: 0.10429 Loss:0.10522 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.69s\n", - "2143968 Examples seen. Accuracy:0.9612 Error: 0.11446 Loss:0.24302 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.72s\n", - "2144608 Examples seen. Accuracy:0.9613 Error: 0.13311 Loss:0.12533 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.71s\n", - "2145248 Examples seen. Accuracy:0.9606 Error: 0.16251 Loss:0.17121 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.65s\n", - "Starting Validation.\n", - "VALIDATION RECORD! Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 43 Examples seen:2145872 Validation Accuracy: 0.9855 Validation Error: 0.0417 Validation Loss: 0.0411 Total time: 232.68min\n", - "Epoch time: 4.8 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 43. Working time: 3.88 hours.\n", - "2146512 Examples seen. Accuracy:0.9604 Error: 0.11870 Loss:0.09435 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.82s\n", - "2147152 Examples seen. Accuracy:0.9606 Error: 0.06837 Loss:0.06857 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.70s\n", - "2147792 Examples seen. Accuracy:0.9609 Error: 0.08509 Loss:0.09438 Threads: 8 Forward time: 5.07s Backward time: 3.21s Step time: 3.72s\n", - "2148432 Examples seen. Accuracy:0.9607 Error: 0.13966 Loss:0.12524 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.68s\n", - "2149072 Examples seen. Accuracy:0.9597 Error: 0.11357 Loss:0.11758 Threads: 8 Forward time: 5.07s Backward time: 3.20s Step time: 3.68s\n", - "2149712 Examples seen. 
Accuracy:0.9584 Error: 0.11924 Loss:0.08772 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.67s\n", - "2150352 Examples seen. Accuracy:0.9587 Error: 0.03487 Loss:0.02216 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.64s\n", - "2150992 Examples seen. Accuracy:0.9595 Error: 0.06457 Loss:0.06064 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.66s\n", - "2151632 Examples seen. Accuracy:0.9596 Error: 0.07961 Loss:0.06442 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.68s\n", - "2152272 Examples seen. Accuracy:0.9595 Error: 0.03298 Loss:0.02337 Threads: 8 Forward time: 4.92s Backward time: 3.18s Step time: 3.65s\n", - "2152912 Examples seen. Accuracy:0.9590 Error: 0.10797 Loss:0.07846 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.65s\n", - "2153552 Examples seen. Accuracy:0.9600 Error: 0.08992 Loss:0.08442 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.66s\n", - "2154192 Examples seen. Accuracy:0.9595 Error: 0.12749 Loss:0.11727 Threads: 8 Forward time: 5.05s Backward time: 3.24s Step time: 3.65s\n", - "2154832 Examples seen. Accuracy:0.9590 Error: 0.12088 Loss:0.12940 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.67s\n", - "2155472 Examples seen. Accuracy:0.9610 Error: 0.03941 Loss:0.02266 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.66s\n", - "2156112 Examples seen. Accuracy:0.9614 Error: 0.09451 Loss:0.06711 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.67s\n", - "2156752 Examples seen. Accuracy:0.9618 Error: 0.09939 Loss:0.17680 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.67s\n", - "2157392 Examples seen. Accuracy:0.9611 Error: 0.08817 Loss:0.06361 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.65s\n", - "2158032 Examples seen. 
Accuracy:0.9606 Error: 0.21197 Loss:0.32624 Threads: 8 Forward time: 5.04s Backward time: 3.20s Step time: 3.70s\n", - "2158672 Examples seen. Accuracy:0.9612 Error: 0.08602 Loss:0.08977 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.63s\n", - "2159312 Examples seen. Accuracy:0.9620 Error: 0.06605 Loss:0.04316 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.64s\n", - "2159952 Examples seen. Accuracy:0.9640 Error: 0.13262 Loss:0.26982 Threads: 8 Forward time: 4.94s Backward time: 3.19s Step time: 3.64s\n", - "2160592 Examples seen. Accuracy:0.9651 Error: 0.03522 Loss:0.02577 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.65s\n", - "2161232 Examples seen. Accuracy:0.9660 Error: 0.02488 Loss:0.01370 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.66s\n", - "2161872 Examples seen. Accuracy:0.9670 Error: 0.12088 Loss:0.10219 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.65s\n", - "2162512 Examples seen. Accuracy:0.9673 Error: 0.08449 Loss:0.05989 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.65s\n", - "2163152 Examples seen. Accuracy:0.9676 Error: 0.04382 Loss:0.02866 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.64s\n", - "2163792 Examples seen. Accuracy:0.9678 Error: 0.05929 Loss:0.05157 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.65s\n", - "2164432 Examples seen. Accuracy:0.9678 Error: 0.08612 Loss:0.05610 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.64s\n", - "2165072 Examples seen. Accuracy:0.9687 Error: 0.13214 Loss:0.13166 Threads: 8 Forward time: 4.94s Backward time: 3.20s Step time: 3.64s\n", - "2165712 Examples seen. Accuracy:0.9685 Error: 0.09036 Loss:0.07031 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.66s\n", - "2166352 Examples seen. 
Accuracy:0.9684 Error: 0.09283 Loss:0.09278 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.66s\n", - "2166992 Examples seen. Accuracy:0.9673 Error: 0.14268 Loss:0.13038 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.65s\n", - "2167632 Examples seen. Accuracy:0.9659 Error: 0.05910 Loss:0.06256 Threads: 8 Forward time: 4.94s Backward time: 3.19s Step time: 3.64s\n", - "2168272 Examples seen. Accuracy:0.9663 Error: 0.07668 Loss:0.06164 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.65s\n", - "2168912 Examples seen. Accuracy:0.9658 Error: 0.07944 Loss:0.08093 Threads: 8 Forward time: 4.96s Backward time: 3.17s Step time: 3.63s\n", - "2169552 Examples seen. Accuracy:0.9655 Error: 0.13784 Loss:0.13705 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.65s\n", - "2170192 Examples seen. Accuracy:0.9648 Error: 0.06838 Loss:0.04370 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.67s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2170832 Examples seen. Accuracy:0.9652 Error: 0.04912 Loss:0.03298 Threads: 8 Forward time: 5.04s Backward time: 3.21s Step time: 3.67s\n", - "2171472 Examples seen. Accuracy:0.9651 Error: 0.06095 Loss:0.05595 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.67s\n", - "2172112 Examples seen. Accuracy:0.9649 Error: 0.20185 Loss:0.23184 Threads: 8 Forward time: 5.21s Backward time: 3.32s Step time: 3.72s\n", - "2172752 Examples seen. Accuracy:0.9654 Error: 0.06384 Loss:0.06430 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.72s\n", - "2173392 Examples seen. Accuracy:0.9647 Error: 0.05035 Loss:0.03945 Threads: 8 Forward time: 5.05s Backward time: 3.20s Step time: 3.71s\n", - "2174032 Examples seen. Accuracy:0.9645 Error: 0.12800 Loss:0.11676 Threads: 8 Forward time: 5.09s Backward time: 3.27s Step time: 3.68s\n", - "2174672 Examples seen. 
Accuracy:0.9642 Error: 0.17388 Loss:0.52305 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.68s\n", - "2175312 Examples seen. Accuracy:0.9645 Error: 0.09176 Loss:0.07710 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.66s\n", - "2175952 Examples seen. Accuracy:0.9638 Error: 0.16468 Loss:0.15757 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.64s\n", - "2176592 Examples seen. Accuracy:0.9636 Error: 0.02155 Loss:0.01151 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.64s\n", - "2177232 Examples seen. Accuracy:0.9635 Error: 0.05643 Loss:0.05546 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "2177872 Examples seen. Accuracy:0.9643 Error: 0.05742 Loss:0.03892 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.63s\n", - "2178512 Examples seen. Accuracy:0.9633 Error: 0.07291 Loss:0.08091 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.70s\n", - "2179152 Examples seen. Accuracy:0.9626 Error: 0.10645 Loss:0.10482 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.70s\n", - "2179792 Examples seen. Accuracy:0.9628 Error: 0.07571 Loss:0.07119 Threads: 8 Forward time: 5.09s Backward time: 3.21s Step time: 3.69s\n", - "2180432 Examples seen. Accuracy:0.9630 Error: 0.03258 Loss:0.02193 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.65s\n", - "2181072 Examples seen. Accuracy:0.9639 Error: 0.04135 Loss:0.02377 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.62s\n", - "2181712 Examples seen. Accuracy:0.9635 Error: 0.05194 Loss:0.05061 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.63s\n", - "2182352 Examples seen. Accuracy:0.9630 Error: 0.14752 Loss:0.14869 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.62s\n", - "2182992 Examples seen. 
Accuracy:0.9623 Error: 0.08928 Loss:0.13221 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.62s\n", - "2183632 Examples seen. Accuracy:0.9635 Error: 0.06841 Loss:0.04940 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.63s\n", - "2184272 Examples seen. Accuracy:0.9635 Error: 0.05628 Loss:0.06051 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.63s\n", - "2184912 Examples seen. Accuracy:0.9631 Error: 0.13624 Loss:0.17239 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.62s\n", - "2185552 Examples seen. Accuracy:0.9625 Error: 0.10047 Loss:0.09523 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.65s\n", - "2186192 Examples seen. Accuracy:0.9625 Error: 0.07725 Loss:0.10182 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.60s\n", - "2186832 Examples seen. Accuracy:0.9635 Error: 0.05553 Loss:0.03412 Threads: 8 Forward time: 5.02s Backward time: 3.19s Step time: 3.62s\n", - "2187472 Examples seen. Accuracy:0.9632 Error: 0.09951 Loss:0.14583 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.60s\n", - "2188112 Examples seen. Accuracy:0.9618 Error: 0.17636 Loss:0.19233 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.62s\n", - "2188752 Examples seen. Accuracy:0.9624 Error: 0.06237 Loss:0.07669 Threads: 8 Forward time: 4.94s Backward time: 3.23s Step time: 3.63s\n", - "2189392 Examples seen. Accuracy:0.9620 Error: 0.07951 Loss:0.17669 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.62s\n", - "2190032 Examples seen. Accuracy:0.9608 Error: 0.07631 Loss:0.05460 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.65s\n", - "2190672 Examples seen. Accuracy:0.9613 Error: 0.04729 Loss:0.03467 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.62s\n", - "2191312 Examples seen. 
Accuracy:0.9616 Error: 0.14495 Loss:0.15853 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.63s\n", - "2191952 Examples seen. Accuracy:0.9606 Error: 0.09179 Loss:0.06952 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.65s\n", - "2192592 Examples seen. Accuracy:0.9620 Error: 0.07045 Loss:0.04203 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.65s\n", - "2193232 Examples seen. Accuracy:0.9631 Error: 0.12244 Loss:0.19063 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.64s\n", - "2193872 Examples seen. Accuracy:0.9637 Error: 0.05471 Loss:0.04694 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.64s\n", - "2194512 Examples seen. Accuracy:0.9645 Error: 0.06500 Loss:0.05480 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.65s\n", - "2195152 Examples seen. Accuracy:0.9646 Error: 0.11435 Loss:0.16702 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.63s\n", - "Starting Validation.\n", - "Epochs: 44 Examples seen:2195776 Validation Accuracy: 0.9848 Validation Error: 0.0415 Validation Loss: 0.0413 Total time: 237.90min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 44. Working time: 3.96 hours.\n", - "2196416 Examples seen. Accuracy:0.9638 Error: 0.06707 Loss:0.05455 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.70s\n", - "2197056 Examples seen. Accuracy:0.9625 Error: 0.13326 Loss:0.13287 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.63s\n", - "2197696 Examples seen. Accuracy:0.9623 Error: 0.05389 Loss:0.03639 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.65s\n", - "2198336 Examples seen. Accuracy:0.9620 Error: 0.13830 Loss:0.13066 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.65s\n", - "2198976 Examples seen. Accuracy:0.9610 Error: 0.13539 Loss:0.21107 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.64s\n", - "2199616 Examples seen. 
Accuracy:0.9604 Error: 0.08878 Loss:0.06378 Threads: 8 Forward time: 4.99s Backward time: 3.17s Step time: 3.65s\n", - "2200256 Examples seen. Accuracy:0.9606 Error: 0.06904 Loss:0.06060 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "2200896 Examples seen. Accuracy:0.9607 Error: 0.05256 Loss:0.03415 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.67s\n", - "2201536 Examples seen. Accuracy:0.9603 Error: 0.14263 Loss:0.17076 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.68s\n", - "2202176 Examples seen. Accuracy:0.9598 Error: 0.05747 Loss:0.03314 Threads: 8 Forward time: 5.04s Backward time: 3.22s Step time: 3.72s\n", - "2202816 Examples seen. Accuracy:0.9598 Error: 0.03687 Loss:0.01958 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.65s\n", - "2203456 Examples seen. Accuracy:0.9598 Error: 0.16185 Loss:0.14748 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.66s\n", - "2204096 Examples seen. Accuracy:0.9583 Error: 0.12543 Loss:0.12296 Threads: 8 Forward time: 4.95s Backward time: 3.18s Step time: 3.69s\n", - "2204736 Examples seen. Accuracy:0.9574 Error: 0.12225 Loss:0.08578 Threads: 8 Forward time: 4.99s Backward time: 3.14s Step time: 3.62s\n", - "2205376 Examples seen. Accuracy:0.9581 Error: 0.08237 Loss:0.08074 Threads: 8 Forward time: 4.98s Backward time: 3.15s Step time: 3.63s\n", - "2206016 Examples seen. Accuracy:0.9584 Error: 0.06354 Loss:0.04355 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.66s\n", - "2206656 Examples seen. Accuracy:0.9578 Error: 0.10062 Loss:0.10562 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.69s\n", - "2207296 Examples seen. Accuracy:0.9581 Error: 0.11726 Loss:0.12055 Threads: 8 Forward time: 5.01s Backward time: 3.18s Step time: 3.68s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2207936 Examples seen. 
Accuracy:0.9583 Error: 0.09920 Loss:0.07449 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.67s\n", - "2208576 Examples seen. Accuracy:0.9588 Error: 0.12463 Loss:0.13638 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.66s\n", - "2209216 Examples seen. Accuracy:0.9601 Error: 0.06230 Loss:0.04044 Threads: 8 Forward time: 5.09s Backward time: 3.23s Step time: 3.71s\n", - "2209856 Examples seen. Accuracy:0.9608 Error: 0.07249 Loss:0.05280 Threads: 8 Forward time: 5.09s Backward time: 3.24s Step time: 3.66s\n", - "2210496 Examples seen. Accuracy:0.9610 Error: 0.10962 Loss:0.09056 Threads: 8 Forward time: 5.10s Backward time: 3.24s Step time: 3.68s\n", - "2211136 Examples seen. Accuracy:0.9606 Error: 0.08151 Loss:0.05317 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.79s\n", - "2211776 Examples seen. Accuracy:0.9597 Error: 0.04749 Loss:0.04379 Threads: 8 Forward time: 4.94s Backward time: 3.18s Step time: 3.61s\n", - "2212416 Examples seen. Accuracy:0.9596 Error: 0.09546 Loss:0.20199 Threads: 8 Forward time: 5.09s Backward time: 3.22s Step time: 3.62s\n", - "2213056 Examples seen. Accuracy:0.9584 Error: 0.04853 Loss:0.03590 Threads: 8 Forward time: 4.97s Backward time: 3.18s Step time: 3.66s\n", - "2213696 Examples seen. Accuracy:0.9585 Error: 0.17711 Loss:0.21431 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.62s\n", - "2214336 Examples seen. Accuracy:0.9599 Error: 0.07186 Loss:0.07902 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.65s\n", - "2214976 Examples seen. Accuracy:0.9612 Error: 0.04215 Loss:0.02723 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.64s\n", - "2215616 Examples seen. Accuracy:0.9603 Error: 0.04699 Loss:0.04298 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.62s\n", - "2216256 Examples seen. 
Accuracy:0.9596 Error: 0.11184 Loss:0.11202 Threads: 8 Forward time: 5.04s Backward time: 3.20s Step time: 3.74s\n", - "2216896 Examples seen. Accuracy:0.9597 Error: 0.04524 Loss:0.03202 Threads: 8 Forward time: 5.15s Backward time: 3.25s Step time: 3.66s\n", - "2217536 Examples seen. Accuracy:0.9594 Error: 0.13752 Loss:0.11988 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.64s\n", - "2218176 Examples seen. Accuracy:0.9602 Error: 0.09819 Loss:0.07648 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.65s\n", - "2218816 Examples seen. Accuracy:0.9591 Error: 0.13943 Loss:0.11528 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.69s\n", - "2219456 Examples seen. Accuracy:0.9586 Error: 0.12464 Loss:0.10484 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.65s\n", - "2220096 Examples seen. Accuracy:0.9590 Error: 0.11554 Loss:0.16358 Threads: 8 Forward time: 4.97s Backward time: 3.19s Step time: 3.64s\n", - "2220736 Examples seen. Accuracy:0.9592 Error: 0.08293 Loss:0.06599 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.68s\n", - "2221376 Examples seen. Accuracy:0.9588 Error: 0.13179 Loss:0.12383 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.62s\n", - "2222016 Examples seen. Accuracy:0.9598 Error: 0.08749 Loss:0.09880 Threads: 8 Forward time: 5.06s Backward time: 3.21s Step time: 3.64s\n", - "2222656 Examples seen. Accuracy:0.9603 Error: 0.08564 Loss:0.09390 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.65s\n", - "2223296 Examples seen. Accuracy:0.9596 Error: 0.12061 Loss:0.10053 Threads: 8 Forward time: 5.06s Backward time: 3.30s Step time: 3.73s\n", - "2223936 Examples seen. Accuracy:0.9595 Error: 0.10722 Loss:0.09209 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.69s\n", - "2224576 Examples seen. 
Accuracy:0.9597 Error: 0.05098 Loss:0.02852 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.65s\n", - "2225216 Examples seen. Accuracy:0.9600 Error: 0.09565 Loss:0.06985 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.67s\n", - "2225856 Examples seen. Accuracy:0.9602 Error: 0.09653 Loss:0.12077 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.64s\n", - "2226496 Examples seen. Accuracy:0.9596 Error: 0.15849 Loss:0.15500 Threads: 8 Forward time: 4.98s Backward time: 3.18s Step time: 3.67s\n", - "2227136 Examples seen. Accuracy:0.9609 Error: 0.05617 Loss:0.03544 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "2227776 Examples seen. Accuracy:0.9613 Error: 0.05057 Loss:0.03336 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.65s\n", - "2228416 Examples seen. Accuracy:0.9612 Error: 0.05656 Loss:0.03637 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.65s\n", - "2229056 Examples seen. Accuracy:0.9616 Error: 0.11143 Loss:0.09354 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "2229696 Examples seen. Accuracy:0.9625 Error: 0.04199 Loss:0.03019 Threads: 8 Forward time: 4.92s Backward time: 3.19s Step time: 3.63s\n", - "2230336 Examples seen. Accuracy:0.9620 Error: 0.13581 Loss:0.14776 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.64s\n", - "2230976 Examples seen. Accuracy:0.9626 Error: 0.05760 Loss:0.03630 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.64s\n", - "2231616 Examples seen. Accuracy:0.9623 Error: 0.01874 Loss:0.00998 Threads: 8 Forward time: 4.99s Backward time: 3.27s Step time: 3.64s\n", - "2232256 Examples seen. Accuracy:0.9619 Error: 0.10529 Loss:0.12723 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.66s\n", - "2232896 Examples seen. 
Accuracy:0.9610 Error: 0.13822 Loss:0.15331 Threads: 8 Forward time: 5.03s Backward time: 3.21s Step time: 3.68s\n", - "2233536 Examples seen. Accuracy:0.9607 Error: 0.21609 Loss:0.19170 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.66s\n", - "2234176 Examples seen. Accuracy:0.9610 Error: 0.10512 Loss:0.08462 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.72s\n", - "2234816 Examples seen. Accuracy:0.9624 Error: 0.11837 Loss:0.13793 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.68s\n", - "2235456 Examples seen. Accuracy:0.9635 Error: 0.05210 Loss:0.03221 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.68s\n", - "2236096 Examples seen. Accuracy:0.9647 Error: 0.10461 Loss:0.07700 Threads: 8 Forward time: 4.90s Backward time: 3.23s Step time: 3.65s\n", - "2236736 Examples seen. Accuracy:0.9634 Error: 0.10867 Loss:0.10829 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.62s\n", - "2237376 Examples seen. Accuracy:0.9640 Error: 0.04601 Loss:0.05502 Threads: 8 Forward time: 5.12s Backward time: 3.28s Step time: 3.69s\n", - "2238016 Examples seen. Accuracy:0.9639 Error: 0.16007 Loss:0.15163 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.65s\n", - "2238656 Examples seen. Accuracy:0.9632 Error: 0.16099 Loss:0.15999 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.66s\n", - "2239296 Examples seen. Accuracy:0.9636 Error: 0.04550 Loss:0.02949 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.65s\n", - "2239936 Examples seen. Accuracy:0.9625 Error: 0.06603 Loss:0.04459 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.65s\n", - "2240576 Examples seen. Accuracy:0.9625 Error: 0.11153 Loss:0.12012 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.65s\n", - "2241216 Examples seen. 
Accuracy:0.9611 Error: 0.15451 Loss:0.14785 Threads: 8 Forward time: 4.93s Backward time: 3.18s Step time: 3.64s\n", - "2241856 Examples seen. Accuracy:0.9610 Error: 0.14070 Loss:0.14935 Threads: 8 Forward time: 4.97s Backward time: 3.18s Step time: 3.64s\n", - "2242496 Examples seen. Accuracy:0.9615 Error: 0.04407 Loss:0.02899 Threads: 8 Forward time: 4.98s Backward time: 3.20s Step time: 3.62s\n", - "2243136 Examples seen. Accuracy:0.9617 Error: 0.11429 Loss:0.08094 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n", - "2243776 Examples seen. Accuracy:0.9622 Error: 0.04540 Loss:0.02638 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.66s\n", - "2244416 Examples seen. Accuracy:0.9615 Error: 0.10046 Loss:0.09142 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.76s\n", - "2245056 Examples seen. Accuracy:0.9605 Error: 0.09301 Loss:0.07046 Threads: 8 Forward time: 5.00s Backward time: 3.20s Step time: 3.69s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Starting Validation.\n", - "Epochs: 45 Examples seen:2245680 Validation Accuracy: 0.9844 Validation Error: 0.0412 Validation Loss: 0.0409 Total time: 243.13min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 45. Working time: 4.05 hours.\n", - "2246320 Examples seen. Accuracy:0.9590 Error: 0.13287 Loss:0.14000 Threads: 8 Forward time: 5.08s Backward time: 3.27s Step time: 3.77s\n", - "2246960 Examples seen. Accuracy:0.9602 Error: 0.07393 Loss:0.04641 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.70s\n", - "2247600 Examples seen. Accuracy:0.9611 Error: 0.04645 Loss:0.02693 Threads: 8 Forward time: 4.98s Backward time: 3.19s Step time: 3.70s\n", - "2248240 Examples seen. Accuracy:0.9608 Error: 0.06798 Loss:0.04907 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.65s\n", - "2248880 Examples seen. 
Accuracy:0.9604 Error: 0.09797 Loss:0.16991 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.66s\n", - "2249520 Examples seen. Accuracy:0.9612 Error: 0.08461 Loss:0.05634 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.66s\n", - "2250160 Examples seen. Accuracy:0.9604 Error: 0.17183 Loss:0.18872 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.66s\n", - "2250800 Examples seen. Accuracy:0.9612 Error: 0.07799 Loss:0.12348 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.65s\n", - "2251440 Examples seen. Accuracy:0.9603 Error: 0.09607 Loss:0.08760 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.67s\n", - "2252080 Examples seen. Accuracy:0.9614 Error: 0.13596 Loss:0.08938 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.72s\n", - "2252720 Examples seen. Accuracy:0.9616 Error: 0.09007 Loss:0.05811 Threads: 8 Forward time: 5.07s Backward time: 3.24s Step time: 3.79s\n", - "2253360 Examples seen. Accuracy:0.9614 Error: 0.03817 Loss:0.02145 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.68s\n", - "2254000 Examples seen. Accuracy:0.9610 Error: 0.10563 Loss:0.10389 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.62s\n", - "2254640 Examples seen. Accuracy:0.9614 Error: 0.00998 Loss:0.00513 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.61s\n", - "2255280 Examples seen. Accuracy:0.9616 Error: 0.05746 Loss:0.03908 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.62s\n", - "2255920 Examples seen. Accuracy:0.9611 Error: 0.10721 Loss:0.14647 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.64s\n", - "2256560 Examples seen. Accuracy:0.9614 Error: 0.18735 Loss:0.14859 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.62s\n", - "2257200 Examples seen. 
Accuracy:0.9621 Error: 0.09981 Loss:0.07409 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.61s\n", - "2257840 Examples seen. Accuracy:0.9615 Error: 0.11865 Loss:0.12271 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.61s\n", - "2258480 Examples seen. Accuracy:0.9618 Error: 0.13714 Loss:0.11136 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.62s\n", - "2259120 Examples seen. Accuracy:0.9618 Error: 0.11446 Loss:0.13939 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.61s\n", - "2259760 Examples seen. Accuracy:0.9625 Error: 0.09157 Loss:0.06055 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.63s\n", - "2260400 Examples seen. Accuracy:0.9630 Error: 0.07051 Loss:0.06171 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.61s\n", - "2261040 Examples seen. Accuracy:0.9620 Error: 0.09042 Loss:0.10776 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.61s\n", - "2261680 Examples seen. Accuracy:0.9615 Error: 0.10618 Loss:0.11881 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.62s\n", - "2262320 Examples seen. Accuracy:0.9626 Error: 0.04730 Loss:0.02709 Threads: 8 Forward time: 4.99s Backward time: 3.28s Step time: 3.64s\n", - "2262960 Examples seen. Accuracy:0.9648 Error: 0.09662 Loss:0.06404 Threads: 8 Forward time: 5.02s Backward time: 3.20s Step time: 3.64s\n", - "2263600 Examples seen. Accuracy:0.9634 Error: 0.13704 Loss:0.12595 Threads: 8 Forward time: 5.11s Backward time: 3.24s Step time: 3.65s\n", - "2264240 Examples seen. Accuracy:0.9625 Error: 0.08465 Loss:0.06717 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.64s\n", - "2264880 Examples seen. Accuracy:0.9625 Error: 0.05490 Loss:0.03704 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.63s\n", - "2265520 Examples seen. 
Accuracy:0.9623 Error: 0.06769 Loss:0.10501 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.61s\n", - "2266160 Examples seen. Accuracy:0.9620 Error: 0.08800 Loss:0.07231 Threads: 8 Forward time: 4.98s Backward time: 3.21s Step time: 3.62s\n", - "2266800 Examples seen. Accuracy:0.9612 Error: 0.11540 Loss:0.17783 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.61s\n", - "2267440 Examples seen. Accuracy:0.9604 Error: 0.09549 Loss:0.07448 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.63s\n", - "2268080 Examples seen. Accuracy:0.9603 Error: 0.15698 Loss:0.17653 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.64s\n", - "2268720 Examples seen. Accuracy:0.9612 Error: 0.08598 Loss:0.08932 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.63s\n", - "2269360 Examples seen. Accuracy:0.9618 Error: 0.09553 Loss:0.07079 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.61s\n", - "2270000 Examples seen. Accuracy:0.9618 Error: 0.12322 Loss:0.16417 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.64s\n", - "2270640 Examples seen. Accuracy:0.9614 Error: 0.13161 Loss:0.11291 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.64s\n", - "2271280 Examples seen. Accuracy:0.9617 Error: 0.03687 Loss:0.02291 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.62s\n", - "2271920 Examples seen. Accuracy:0.9614 Error: 0.04881 Loss:0.03459 Threads: 8 Forward time: 4.96s Backward time: 3.27s Step time: 3.62s\n", - "2272560 Examples seen. Accuracy:0.9620 Error: 0.07914 Loss:0.13548 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.64s\n", - "2273200 Examples seen. Accuracy:0.9619 Error: 0.10582 Loss:0.17881 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.64s\n", - "2273840 Examples seen. 
Accuracy:0.9622 Error: 0.02611 Loss:0.01410 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.62s\n", - "2274480 Examples seen. Accuracy:0.9611 Error: 0.16059 Loss:0.21528 Threads: 8 Forward time: 4.97s Backward time: 3.18s Step time: 3.67s\n", - "2275120 Examples seen. Accuracy:0.9603 Error: 0.18593 Loss:0.23348 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.66s\n", - "2275760 Examples seen. Accuracy:0.9610 Error: 0.07312 Loss:0.04423 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.64s\n", - "2276400 Examples seen. Accuracy:0.9608 Error: 0.10570 Loss:0.08029 Threads: 8 Forward time: 4.94s Backward time: 3.20s Step time: 3.65s\n", - "2277040 Examples seen. Accuracy:0.9621 Error: 0.03549 Loss:0.02158 Threads: 8 Forward time: 4.93s Backward time: 3.20s Step time: 3.63s\n", - "2277680 Examples seen. Accuracy:0.9622 Error: 0.14673 Loss:0.25365 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.64s\n", - "2278320 Examples seen. Accuracy:0.9616 Error: 0.08798 Loss:0.05844 Threads: 8 Forward time: 4.91s Backward time: 3.21s Step time: 3.64s\n", - "2278960 Examples seen. Accuracy:0.9619 Error: 0.04089 Loss:0.02319 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.67s\n", - "2279600 Examples seen. Accuracy:0.9607 Error: 0.16720 Loss:0.15693 Threads: 8 Forward time: 4.94s Backward time: 3.20s Step time: 3.65s\n", - "2280240 Examples seen. Accuracy:0.9616 Error: 0.16471 Loss:0.18166 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.68s\n", - "2280880 Examples seen. Accuracy:0.9616 Error: 0.10903 Loss:0.09177 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.68s\n", - "2281520 Examples seen. Accuracy:0.9626 Error: 0.07130 Loss:0.04390 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.64s\n", - "2282160 Examples seen. 
Accuracy:0.9627 Error: 0.12406 Loss:0.10357 Threads: 8 Forward time: 5.37s Backward time: 3.42s Step time: 3.82s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2282800 Examples seen. Accuracy:0.9639 Error: 0.04753 Loss:0.03004 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.82s\n", - "2283440 Examples seen. Accuracy:0.9631 Error: 0.04131 Loss:0.02814 Threads: 8 Forward time: 5.07s Backward time: 3.25s Step time: 3.68s\n", - "2284080 Examples seen. Accuracy:0.9627 Error: 0.08728 Loss:0.06733 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.68s\n", - "2284720 Examples seen. Accuracy:0.9627 Error: 0.04914 Loss:0.08239 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.70s\n", - "2285360 Examples seen. Accuracy:0.9628 Error: 0.13192 Loss:0.11839 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.77s\n", - "2286000 Examples seen. Accuracy:0.9634 Error: 0.12382 Loss:0.08834 Threads: 8 Forward time: 4.93s Backward time: 3.21s Step time: 3.75s\n", - "2286640 Examples seen. Accuracy:0.9637 Error: 0.06468 Loss:0.04005 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.67s\n", - "2287280 Examples seen. Accuracy:0.9635 Error: 0.07094 Loss:0.09748 Threads: 8 Forward time: 5.01s Backward time: 3.29s Step time: 3.73s\n", - "2287920 Examples seen. Accuracy:0.9634 Error: 0.08314 Loss:0.11998 Threads: 8 Forward time: 5.09s Backward time: 3.30s Step time: 3.82s\n", - "2288560 Examples seen. Accuracy:0.9638 Error: 0.03340 Loss:0.02209 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.75s\n", - "2289200 Examples seen. Accuracy:0.9635 Error: 0.03898 Loss:0.02286 Threads: 8 Forward time: 5.07s Backward time: 3.31s Step time: 3.77s\n", - "2289840 Examples seen. Accuracy:0.9625 Error: 0.08315 Loss:0.08321 Threads: 8 Forward time: 5.35s Backward time: 3.39s Step time: 4.02s\n", - "2290480 Examples seen. 
Accuracy:0.9626 Error: 0.03112 Loss:0.02013 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.87s\n", - "2291120 Examples seen. Accuracy:0.9629 Error: 0.05307 Loss:0.04526 Threads: 8 Forward time: 5.09s Backward time: 3.29s Step time: 3.68s\n", - "2291760 Examples seen. Accuracy:0.9628 Error: 0.09894 Loss:0.07864 Threads: 8 Forward time: 5.13s Backward time: 3.30s Step time: 3.95s\n", - "2292400 Examples seen. Accuracy:0.9637 Error: 0.07239 Loss:0.04569 Threads: 8 Forward time: 4.98s Backward time: 3.30s Step time: 3.78s\n", - "2293040 Examples seen. Accuracy:0.9638 Error: 0.11483 Loss:0.09025 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.68s\n", - "2293680 Examples seen. Accuracy:0.9633 Error: 0.10536 Loss:0.09038 Threads: 8 Forward time: 5.13s Backward time: 3.31s Step time: 3.70s\n", - "2294320 Examples seen. Accuracy:0.9635 Error: 0.07328 Loss:0.06695 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.70s\n", - "2294960 Examples seen. Accuracy:0.9646 Error: 0.03526 Loss:0.02102 Threads: 8 Forward time: 4.94s Backward time: 3.25s Step time: 3.72s\n", - "Starting Validation.\n", - "Epochs: 46 Examples seen:2295584 Validation Accuracy: 0.9851 Validation Error: 0.0408 Validation Loss: 0.0409 Total time: 248.38min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8.1 hours.\n", - "Epochs: 46. Working time: 4.14 hours.\n", - "2296224 Examples seen. Accuracy:0.9659 Error: 0.06586 Loss:0.04170 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.74s\n", - "2296864 Examples seen. Accuracy:0.9669 Error: 0.10571 Loss:0.08581 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.63s\n", - "2297504 Examples seen. Accuracy:0.9672 Error: 0.12960 Loss:0.17848 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.63s\n", - "2298144 Examples seen. Accuracy:0.9668 Error: 0.07218 Loss:0.04813 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.69s\n", - "2298784 Examples seen. 
Accuracy:0.9660 Error: 0.04048 Loss:0.03776 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.64s\n", - "2299424 Examples seen. Accuracy:0.9656 Error: 0.11407 Loss:0.09519 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.63s\n", - "2300064 Examples seen. Accuracy:0.9649 Error: 0.16443 Loss:0.19128 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.62s\n", - "2300704 Examples seen. Accuracy:0.9649 Error: 0.05915 Loss:0.05190 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.64s\n", - "2301344 Examples seen. Accuracy:0.9645 Error: 0.06060 Loss:0.06638 Threads: 8 Forward time: 5.09s Backward time: 3.33s Step time: 3.73s\n", - "2301984 Examples seen. Accuracy:0.9642 Error: 0.05887 Loss:0.03872 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.71s\n", - "2302624 Examples seen. Accuracy:0.9641 Error: 0.07616 Loss:0.05571 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.64s\n", - "2303264 Examples seen. Accuracy:0.9634 Error: 0.16277 Loss:0.15420 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.63s\n", - "2303904 Examples seen. Accuracy:0.9622 Error: 0.08038 Loss:0.05928 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.63s\n", - "2304544 Examples seen. Accuracy:0.9625 Error: 0.14119 Loss:0.12603 Threads: 8 Forward time: 4.91s Backward time: 3.24s Step time: 3.69s\n", - "2305184 Examples seen. Accuracy:0.9634 Error: 0.06931 Loss:0.05200 Threads: 8 Forward time: 5.26s Backward time: 3.37s Step time: 3.71s\n", - "2305824 Examples seen. Accuracy:0.9637 Error: 0.09103 Loss:0.09831 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.69s\n", - "2306464 Examples seen. Accuracy:0.9635 Error: 0.10158 Loss:0.08078 Threads: 8 Forward time: 5.06s Backward time: 3.28s Step time: 3.71s\n", - "2307104 Examples seen. 
Accuracy:0.9633 Error: 0.05507 Loss:0.07271 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.69s\n", - "2307744 Examples seen. Accuracy:0.9630 Error: 0.07128 Loss:0.08737 Threads: 8 Forward time: 4.98s Backward time: 3.27s Step time: 3.68s\n", - "2308384 Examples seen. Accuracy:0.9629 Error: 0.08583 Loss:0.06452 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.63s\n", - "2309024 Examples seen. Accuracy:0.9635 Error: 0.09245 Loss:0.07497 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.77s\n", - "2309664 Examples seen. Accuracy:0.9642 Error: 0.12577 Loss:0.10954 Threads: 8 Forward time: 4.94s Backward time: 3.26s Step time: 3.66s\n", - "2310304 Examples seen. Accuracy:0.9632 Error: 0.07365 Loss:0.05179 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.65s\n", - "2310944 Examples seen. Accuracy:0.9634 Error: 0.05146 Loss:0.04511 Threads: 8 Forward time: 4.92s Backward time: 3.24s Step time: 3.66s\n", - "2311584 Examples seen. Accuracy:0.9630 Error: 0.07177 Loss:0.06599 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.65s\n", - "2312224 Examples seen. Accuracy:0.9639 Error: 0.05840 Loss:0.06960 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.65s\n", - "2312864 Examples seen. Accuracy:0.9628 Error: 0.10309 Loss:0.09770 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.65s\n", - "2313504 Examples seen. Accuracy:0.9627 Error: 0.08128 Loss:0.08986 Threads: 8 Forward time: 5.02s Backward time: 3.26s Step time: 3.72s\n", - "2314144 Examples seen. Accuracy:0.9632 Error: 0.08707 Loss:0.06622 Threads: 8 Forward time: 4.97s Backward time: 3.27s Step time: 3.66s\n", - "2314784 Examples seen. Accuracy:0.9636 Error: 0.08530 Loss:0.08689 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.67s\n", - "2315424 Examples seen. 
Accuracy:0.9631 Error: 0.11385 Loss:0.23239 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.67s\n", - "2316064 Examples seen. Accuracy:0.9635 Error: 0.16649 Loss:0.17085 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.66s\n", - "2316704 Examples seen. Accuracy:0.9630 Error: 0.11639 Loss:0.09509 Threads: 8 Forward time: 4.99s Backward time: 3.24s Step time: 3.67s\n", - "2317344 Examples seen. Accuracy:0.9630 Error: 0.07538 Loss:0.04672 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.67s\n", - "2317984 Examples seen. Accuracy:0.9625 Error: 0.07616 Loss:0.10934 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.67s\n", - "2318624 Examples seen. Accuracy:0.9625 Error: 0.14170 Loss:0.16748 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.67s\n", - "2319264 Examples seen. Accuracy:0.9618 Error: 0.07078 Loss:0.06174 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.67s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2319904 Examples seen. Accuracy:0.9612 Error: 0.15473 Loss:0.14835 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.65s\n", - "2320544 Examples seen. Accuracy:0.9611 Error: 0.13248 Loss:0.14146 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.65s\n", - "2321184 Examples seen. Accuracy:0.9617 Error: 0.10677 Loss:0.07230 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.66s\n", - "2321824 Examples seen. Accuracy:0.9627 Error: 0.01910 Loss:0.01003 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.72s\n", - "2322464 Examples seen. Accuracy:0.9629 Error: 0.16464 Loss:0.35405 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.71s\n", - "2323104 Examples seen. Accuracy:0.9640 Error: 0.06082 Loss:0.06551 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.73s\n", - "2323744 Examples seen. 
Accuracy:0.9627 Error: 0.17456 Loss:0.26165 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.71s\n", - "2324384 Examples seen. Accuracy:0.9626 Error: 0.09555 Loss:0.11007 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.77s\n", - "2325024 Examples seen. Accuracy:0.9632 Error: 0.10559 Loss:0.11993 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.71s\n", - "2325664 Examples seen. Accuracy:0.9646 Error: 0.04329 Loss:0.02862 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.71s\n", - "2326304 Examples seen. Accuracy:0.9655 Error: 0.08323 Loss:0.06619 Threads: 8 Forward time: 5.09s Backward time: 3.29s Step time: 3.79s\n", - "2326944 Examples seen. Accuracy:0.9649 Error: 0.21153 Loss:0.34516 Threads: 8 Forward time: 5.19s Backward time: 3.35s Step time: 3.80s\n", - "2327584 Examples seen. Accuracy:0.9649 Error: 0.13711 Loss:0.13058 Threads: 8 Forward time: 5.11s Backward time: 3.31s Step time: 3.80s\n", - "2328224 Examples seen. Accuracy:0.9651 Error: 0.07983 Loss:0.07648 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.69s\n", - "2328864 Examples seen. Accuracy:0.9650 Error: 0.10170 Loss:0.09765 Threads: 8 Forward time: 5.03s Backward time: 3.27s Step time: 3.68s\n", - "2329504 Examples seen. Accuracy:0.9645 Error: 0.04890 Loss:0.03065 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.66s\n", - "2330144 Examples seen. Accuracy:0.9634 Error: 0.16142 Loss:0.13705 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.67s\n", - "2330784 Examples seen. Accuracy:0.9623 Error: 0.09151 Loss:0.06941 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.64s\n", - "2331424 Examples seen. Accuracy:0.9630 Error: 0.01054 Loss:0.00552 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.67s\n", - "2332064 Examples seen. 
Accuracy:0.9615 Error: 0.12485 Loss:0.12610 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.74s\n", - "2332704 Examples seen. Accuracy:0.9619 Error: 0.08962 Loss:0.11357 Threads: 8 Forward time: 4.96s Backward time: 3.19s Step time: 3.67s\n", - "2333344 Examples seen. Accuracy:0.9622 Error: 0.09270 Loss:0.07307 Threads: 8 Forward time: 5.11s Backward time: 3.25s Step time: 3.75s\n", - "2333984 Examples seen. Accuracy:0.9628 Error: 0.06777 Loss:0.04050 Threads: 8 Forward time: 5.05s Backward time: 3.27s Step time: 3.72s\n", - "2334624 Examples seen. Accuracy:0.9629 Error: 0.09292 Loss:0.09402 Threads: 8 Forward time: 5.06s Backward time: 3.22s Step time: 3.73s\n", - "2335264 Examples seen. Accuracy:0.9635 Error: 0.04261 Loss:0.02376 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.72s\n", - "2335904 Examples seen. Accuracy:0.9632 Error: 0.06623 Loss:0.04791 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.68s\n", - "2336544 Examples seen. Accuracy:0.9627 Error: 0.09927 Loss:0.08880 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.70s\n", - "2337184 Examples seen. Accuracy:0.9640 Error: 0.06512 Loss:0.07985 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.79s\n", - "2337824 Examples seen. Accuracy:0.9639 Error: 0.04249 Loss:0.02918 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.64s\n", - "2338464 Examples seen. Accuracy:0.9634 Error: 0.08471 Loss:0.08572 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.68s\n", - "2339104 Examples seen. Accuracy:0.9646 Error: 0.03504 Loss:0.01831 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.67s\n", - "2339744 Examples seen. Accuracy:0.9649 Error: 0.02018 Loss:0.01086 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.66s\n", - "2340384 Examples seen. 
Accuracy:0.9650 Error: 0.06721 Loss:0.07485 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.70s\n", - "2341024 Examples seen. Accuracy:0.9648 Error: 0.13182 Loss:0.14514 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.68s\n", - "2341664 Examples seen. Accuracy:0.9645 Error: 0.11368 Loss:0.10051 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.67s\n", - "2342304 Examples seen. Accuracy:0.9650 Error: 0.05954 Loss:0.04595 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.71s\n", - "2342944 Examples seen. Accuracy:0.9645 Error: 0.05092 Loss:0.03668 Threads: 8 Forward time: 4.96s Backward time: 3.25s Step time: 3.61s\n", - "2343584 Examples seen. Accuracy:0.9647 Error: 0.06973 Loss:0.07446 Threads: 8 Forward time: 5.07s Backward time: 3.28s Step time: 3.66s\n", - "2344224 Examples seen. Accuracy:0.9625 Error: 0.20134 Loss:0.21778 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.69s\n", - "2344864 Examples seen. Accuracy:0.9629 Error: 0.12162 Loss:0.20828 Threads: 8 Forward time: 4.97s Backward time: 3.17s Step time: 3.62s\n", - "Starting Validation.\n", - "Epochs: 47 Examples seen:2345488 Validation Accuracy: 0.9844 Validation Error: 0.0403 Validation Loss: 0.0403 Total time: 253.64min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 47. Working time: 4.23 hours.\n", - "2346128 Examples seen. Accuracy:0.9621 Error: 0.12571 Loss:0.10451 Threads: 8 Forward time: 5.00s Backward time: 3.19s Step time: 3.71s\n", - "2346768 Examples seen. Accuracy:0.9625 Error: 0.18865 Loss:0.27467 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.60s\n", - "2347408 Examples seen. Accuracy:0.9632 Error: 0.06129 Loss:0.04071 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.61s\n", - "2348048 Examples seen. Accuracy:0.9624 Error: 0.09933 Loss:0.08646 Threads: 8 Forward time: 5.17s Backward time: 3.37s Step time: 3.75s\n", - "2348688 Examples seen. 
Accuracy:0.9636 Error: 0.01899 Loss:0.01031 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.64s\n", - "2349328 Examples seen. Accuracy:0.9639 Error: 0.04952 Loss:0.03411 Threads: 8 Forward time: 5.04s Backward time: 3.28s Step time: 3.69s\n", - "2349968 Examples seen. Accuracy:0.9651 Error: 0.05086 Loss:0.03274 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.62s\n", - "2350608 Examples seen. Accuracy:0.9641 Error: 0.14228 Loss:0.13666 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "2351248 Examples seen. Accuracy:0.9642 Error: 0.06595 Loss:0.05662 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.64s\n", - "2351888 Examples seen. Accuracy:0.9641 Error: 0.08222 Loss:0.06902 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.63s\n", - "2352528 Examples seen. Accuracy:0.9639 Error: 0.11847 Loss:0.10436 Threads: 8 Forward time: 5.00s Backward time: 3.25s Step time: 3.62s\n", - "2353168 Examples seen. Accuracy:0.9637 Error: 0.09705 Loss:0.08791 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.63s\n", - "2353808 Examples seen. Accuracy:0.9642 Error: 0.09139 Loss:0.10848 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.61s\n", - "2354448 Examples seen. Accuracy:0.9622 Error: 0.09577 Loss:0.07764 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.61s\n", - "2355088 Examples seen. Accuracy:0.9628 Error: 0.08163 Loss:0.07548 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.59s\n", - "2355728 Examples seen. Accuracy:0.9626 Error: 0.07974 Loss:0.07101 Threads: 8 Forward time: 5.01s Backward time: 3.20s Step time: 3.63s\n", - "2356368 Examples seen. Accuracy:0.9628 Error: 0.06909 Loss:0.04493 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.60s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2357008 Examples seen. 
Accuracy:0.9620 Error: 0.15517 Loss:0.16409 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.62s\n", - "2357648 Examples seen. Accuracy:0.9628 Error: 0.13398 Loss:0.10719 Threads: 8 Forward time: 5.06s Backward time: 3.26s Step time: 3.64s\n", - "2358288 Examples seen. Accuracy:0.9634 Error: 0.05161 Loss:0.03175 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.67s\n", - "2358928 Examples seen. Accuracy:0.9633 Error: 0.08879 Loss:0.05667 Threads: 8 Forward time: 4.92s Backward time: 3.21s Step time: 3.63s\n", - "2359568 Examples seen. Accuracy:0.9635 Error: 0.03576 Loss:0.02231 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.63s\n", - "2360208 Examples seen. Accuracy:0.9635 Error: 0.03643 Loss:0.02726 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.64s\n", - "2360848 Examples seen. Accuracy:0.9635 Error: 0.05455 Loss:0.04282 Threads: 8 Forward time: 4.92s Backward time: 3.19s Step time: 3.61s\n", - "2361488 Examples seen. Accuracy:0.9636 Error: 0.03514 Loss:0.03008 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.62s\n", - "2362128 Examples seen. Accuracy:0.9629 Error: 0.08633 Loss:0.09258 Threads: 8 Forward time: 5.04s Backward time: 3.29s Step time: 3.64s\n", - "2362768 Examples seen. Accuracy:0.9633 Error: 0.08147 Loss:0.05970 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.63s\n", - "2363408 Examples seen. Accuracy:0.9634 Error: 0.06049 Loss:0.03963 Threads: 8 Forward time: 5.08s Backward time: 3.28s Step time: 3.69s\n", - "2364048 Examples seen. Accuracy:0.9637 Error: 0.09302 Loss:0.07505 Threads: 8 Forward time: 5.09s Backward time: 3.28s Step time: 3.73s\n", - "2364688 Examples seen. Accuracy:0.9624 Error: 0.05407 Loss:0.04131 Threads: 8 Forward time: 5.07s Backward time: 3.26s Step time: 3.65s\n", - "2365328 Examples seen. 
Accuracy:0.9627 Error: 0.12092 Loss:0.14723 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.72s\n", - "2365968 Examples seen. Accuracy:0.9619 Error: 0.12603 Loss:0.10947 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.63s\n", - "2366608 Examples seen. Accuracy:0.9625 Error: 0.08230 Loss:0.07550 Threads: 8 Forward time: 5.04s Backward time: 3.23s Step time: 3.64s\n", - "2367248 Examples seen. Accuracy:0.9619 Error: 0.04252 Loss:0.03187 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.68s\n", - "2367888 Examples seen. Accuracy:0.9626 Error: 0.05655 Loss:0.07812 Threads: 8 Forward time: 5.10s Backward time: 3.27s Step time: 3.66s\n", - "2368528 Examples seen. Accuracy:0.9633 Error: 0.10837 Loss:0.13137 Threads: 8 Forward time: 5.06s Backward time: 3.25s Step time: 3.65s\n", - "2369168 Examples seen. Accuracy:0.9632 Error: 0.09525 Loss:0.08900 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.64s\n", - "2369808 Examples seen. Accuracy:0.9637 Error: 0.20888 Loss:0.23777 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.66s\n", - "2370448 Examples seen. Accuracy:0.9644 Error: 0.06795 Loss:0.09751 Threads: 8 Forward time: 5.02s Backward time: 3.22s Step time: 3.66s\n", - "2371088 Examples seen. Accuracy:0.9642 Error: 0.09239 Loss:0.16268 Threads: 8 Forward time: 5.04s Backward time: 3.27s Step time: 3.88s\n", - "2371728 Examples seen. Accuracy:0.9642 Error: 0.11508 Loss:0.10845 Threads: 8 Forward time: 5.05s Backward time: 3.26s Step time: 3.65s\n", - "2372368 Examples seen. Accuracy:0.9634 Error: 0.04192 Loss:0.02852 Threads: 8 Forward time: 4.91s Backward time: 3.21s Step time: 3.63s\n", - "2373008 Examples seen. Accuracy:0.9632 Error: 0.09111 Loss:0.10761 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.61s\n", - "2373648 Examples seen. 
Accuracy:0.9629 Error: 0.13887 Loss:0.16349 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.61s\n", - "2374288 Examples seen. Accuracy:0.9631 Error: 0.07565 Loss:0.06116 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.62s\n", - "2374928 Examples seen. Accuracy:0.9625 Error: 0.13207 Loss:0.20477 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.62s\n", - "2375568 Examples seen. Accuracy:0.9623 Error: 0.10514 Loss:0.08668 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.66s\n", - "2376208 Examples seen. Accuracy:0.9618 Error: 0.05733 Loss:0.05255 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.62s\n", - "2376848 Examples seen. Accuracy:0.9625 Error: 0.06629 Loss:0.05432 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.62s\n", - "2377488 Examples seen. Accuracy:0.9628 Error: 0.12186 Loss:0.11690 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.61s\n", - "2378128 Examples seen. Accuracy:0.9615 Error: 0.08455 Loss:0.05888 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.66s\n", - "2378768 Examples seen. Accuracy:0.9629 Error: 0.08426 Loss:0.07770 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.61s\n", - "2379408 Examples seen. Accuracy:0.9624 Error: 0.07930 Loss:0.09174 Threads: 8 Forward time: 5.07s Backward time: 3.26s Step time: 3.62s\n", - "2380048 Examples seen. Accuracy:0.9626 Error: 0.09010 Loss:0.05617 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.62s\n", - "2380688 Examples seen. Accuracy:0.9620 Error: 0.07518 Loss:0.15200 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.62s\n", - "2381328 Examples seen. Accuracy:0.9627 Error: 0.08042 Loss:0.15040 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.62s\n", - "2381968 Examples seen. 
Accuracy:0.9623 Error: 0.08539 Loss:0.06390 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "2382608 Examples seen. Accuracy:0.9620 Error: 0.04215 Loss:0.02372 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.68s\n", - "2383248 Examples seen. Accuracy:0.9634 Error: 0.02382 Loss:0.01421 Threads: 8 Forward time: 5.08s Backward time: 3.25s Step time: 3.72s\n", - "2383888 Examples seen. Accuracy:0.9637 Error: 0.19136 Loss:0.17446 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.66s\n", - "2384528 Examples seen. Accuracy:0.9632 Error: 0.09805 Loss:0.09111 Threads: 8 Forward time: 4.96s Backward time: 3.23s Step time: 3.65s\n", - "2385168 Examples seen. Accuracy:0.9624 Error: 0.09247 Loss:0.07906 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.65s\n", - "2385808 Examples seen. Accuracy:0.9628 Error: 0.07368 Loss:0.04764 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.67s\n", - "2386448 Examples seen. Accuracy:0.9631 Error: 0.07260 Loss:0.08953 Threads: 8 Forward time: 4.94s Backward time: 3.20s Step time: 3.65s\n", - "2387088 Examples seen. Accuracy:0.9636 Error: 0.06226 Loss:0.04941 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.69s\n", - "2387728 Examples seen. Accuracy:0.9629 Error: 0.04731 Loss:0.03388 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.67s\n", - "2388368 Examples seen. Accuracy:0.9633 Error: 0.04096 Loss:0.02435 Threads: 8 Forward time: 5.05s Backward time: 3.27s Step time: 3.68s\n", - "2389008 Examples seen. Accuracy:0.9635 Error: 0.05838 Loss:0.06045 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.65s\n", - "2389648 Examples seen. Accuracy:0.9637 Error: 0.07119 Loss:0.04667 Threads: 8 Forward time: 4.94s Backward time: 3.19s Step time: 3.73s\n", - "2390288 Examples seen. 
Accuracy:0.9639 Error: 0.17132 Loss:0.22397 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.71s\n", - "2390928 Examples seen. Accuracy:0.9630 Error: 0.10484 Loss:0.07961 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.71s\n", - "2391568 Examples seen. Accuracy:0.9621 Error: 0.07735 Loss:0.05401 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.68s\n", - "2392208 Examples seen. Accuracy:0.9621 Error: 0.07971 Loss:0.07634 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.64s\n", - "2392848 Examples seen. Accuracy:0.9616 Error: 0.10771 Loss:0.11172 Threads: 8 Forward time: 5.05s Backward time: 3.23s Step time: 3.67s\n", - "2393488 Examples seen. Accuracy:0.9605 Error: 0.04745 Loss:0.03086 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.66s\n", - "2394128 Examples seen. Accuracy:0.9601 Error: 0.04269 Loss:0.02374 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.65s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2394768 Examples seen. Accuracy:0.9598 Error: 0.10447 Loss:0.08630 Threads: 8 Forward time: 5.02s Backward time: 3.21s Step time: 3.63s\n", - "Starting Validation.\n", - "VALIDATION RECORD! 
Saving NN at SimplePlantLeafDisease.nn\n", - "Epochs: 48 Examples seen:2395392 Validation Accuracy: 0.9859 Validation Error: 0.0398 Validation Loss: 0.0396 Total time: 258.86min\n", - "Layer 0 Max Output: 1.266 Min Output: -2.000 TNNetInput 128,128,3 Times: 0.00s 0.00s\n", - "Layer 1 Neurons: 64 Max Weight: 0.459 Min Weight: -0.377 Max Output: 6.108 Min Output: -5.924 TNNetConvolutionLinear 66,66,64 Times: 8.55s 0.40s Parent:0\n", - "Layer 2 Max Output: 6.108 Min Output: -3.525 TNNetMaxPool 33,33,64 Times: 3.62s 0.06s Parent:1\n", - "Layer 3 Neurons: 1 Max Weight: 0.650 Min Weight: 0.228 Max Output: 9.040 Min Output: -5.770 TNNetMovingStdNormalization 33,33,64 Times: 0.28s 0.00s Parent:2\n", - "Layer 4 Neurons: 64 Max Weight: 0.439 Min Weight: -0.230 Max Output: 11.407 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.87s 0.51s Parent:3\n", - "Layer 5 Neurons: 64 Max Weight: 0.397 Min Weight: -0.367 Max Output: 11.943 Min Output: 0.000 TNNetConvolutionReLU 33,33,64 Times: 5.89s 0.18s Parent:4\n", - "Layer 6 Max Output: 11.943 Min Output: 0.000 TNNetMaxPool 17,17,64 Times: 0.48s 0.02s Parent:5\n", - "Layer 7 Neurons: 64 Max Weight: 0.431 Min Weight: -0.275 Max Output: 8.258 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.44s 0.09s Parent:6\n", - "Layer 8 Neurons: 64 Max Weight: 0.290 Min Weight: -0.235 Max Output: 6.972 Min Output: 0.000 TNNetConvolutionReLU 17,17,64 Times: 1.48s 0.09s Parent:7\n", - "Layer 9 Neurons: 64 Max Weight: 0.253 Min Weight: -0.221 Max Output: 13.311 Min Output: 0.000 TNNetConvolutionReLU 9,9,64 Times: 0.46s 0.02s Parent:8\n", - "Layer 10 Max Output: 13.311 Min Output: 0.000 TNNetDropout 9,9,64 Times: 0.01s 0.00s Parent:9\n", - "Layer 11 Max Output: 13.311 Min Output: 0.000 TNNetMaxPool 5,5,64 Times: 0.05s 0.00s Parent:10\n", - "Layer 12 Neurons: 39 Max Weight: 0.382 Min Weight: -0.396 Max Output: 41.235 Min Output: -17.884 TNNetFullConnectLinear 39,1,1 Times: 0.03s 0.00s Parent:11\n", - "Layer 13 Max Output: 1.000 Min 
Output: 0.000 TNNetSoftMax 39,1,1 Times: 0.00s 0.00s Parent:12\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 48. Working time: 4.31 hours.\n", - "2396032 Examples seen. Accuracy:0.9600 Error: 0.08347 Loss:0.06521 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.76s\n", - "2396672 Examples seen. Accuracy:0.9615 Error: 0.04975 Loss:0.04257 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.66s\n", - "2397312 Examples seen. Accuracy:0.9630 Error: 0.15518 Loss:0.22900 Threads: 8 Forward time: 4.96s Backward time: 3.18s Step time: 3.64s\n", - "2397952 Examples seen. Accuracy:0.9621 Error: 0.10374 Loss:0.16176 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.63s\n", - "2398592 Examples seen. Accuracy:0.9608 Error: 0.14474 Loss:0.16522 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.69s\n", - "2399232 Examples seen. Accuracy:0.9611 Error: 0.09554 Loss:0.09972 Threads: 8 Forward time: 5.06s Backward time: 3.25s Step time: 3.67s\n", - "2399872 Examples seen. Accuracy:0.9615 Error: 0.09499 Loss:0.08449 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.66s\n", - "2400512 Examples seen. Accuracy:0.9621 Error: 0.09522 Loss:0.09784 Threads: 8 Forward time: 5.08s Backward time: 3.27s Step time: 3.73s\n", - "2401152 Examples seen. Accuracy:0.9631 Error: 0.03988 Loss:0.02454 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.70s\n", - "2401792 Examples seen. Accuracy:0.9635 Error: 0.04436 Loss:0.03415 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.73s\n", - "2402432 Examples seen. Accuracy:0.9634 Error: 0.08239 Loss:0.05189 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.69s\n", - "2403072 Examples seen. Accuracy:0.9630 Error: 0.10845 Loss:0.08257 Threads: 8 Forward time: 4.99s Backward time: 3.21s Step time: 3.69s\n", - "2403712 Examples seen. 
Accuracy:0.9625 Error: 0.08418 Loss:0.09886 Threads: 8 Forward time: 5.11s Backward time: 3.28s Step time: 3.72s\n", - "2404352 Examples seen. Accuracy:0.9624 Error: 0.03258 Loss:0.01814 Threads: 8 Forward time: 5.08s Backward time: 3.25s Step time: 3.74s\n", - "2404992 Examples seen. Accuracy:0.9637 Error: 0.02063 Loss:0.01094 Threads: 8 Forward time: 5.02s Backward time: 3.31s Step time: 3.76s\n", - "2405632 Examples seen. Accuracy:0.9641 Error: 0.01119 Loss:0.00600 Threads: 8 Forward time: 5.06s Backward time: 3.25s Step time: 3.71s\n", - "2406272 Examples seen. Accuracy:0.9641 Error: 0.06662 Loss:0.06698 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.73s\n", - "2406912 Examples seen. Accuracy:0.9633 Error: 0.08579 Loss:0.08309 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.62s\n", - "2407552 Examples seen. Accuracy:0.9629 Error: 0.03959 Loss:0.02273 Threads: 8 Forward time: 4.96s Backward time: 3.26s Step time: 3.65s\n", - "2408192 Examples seen. Accuracy:0.9629 Error: 0.07206 Loss:0.07031 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.63s\n", - "2408832 Examples seen. Accuracy:0.9642 Error: 0.03145 Loss:0.01727 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.64s\n", - "2409472 Examples seen. Accuracy:0.9646 Error: 0.15736 Loss:0.20048 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.63s\n", - "2410112 Examples seen. Accuracy:0.9643 Error: 0.11770 Loss:0.09110 Threads: 8 Forward time: 4.92s Backward time: 3.20s Step time: 3.64s\n", - "2410752 Examples seen. Accuracy:0.9635 Error: 0.04128 Loss:0.04368 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.63s\n", - "2411392 Examples seen. Accuracy:0.9625 Error: 0.15233 Loss:0.20459 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.71s\n", - "2412032 Examples seen. 
Accuracy:0.9632 Error: 0.12874 Loss:0.18221 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.64s\n", - "2412672 Examples seen. Accuracy:0.9635 Error: 0.03913 Loss:0.02232 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.64s\n", - "2413312 Examples seen. Accuracy:0.9621 Error: 0.07859 Loss:0.07963 Threads: 8 Forward time: 5.12s Backward time: 3.30s Step time: 3.83s\n", - "2413952 Examples seen. Accuracy:0.9620 Error: 0.07945 Loss:0.09900 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.71s\n", - "2414592 Examples seen. Accuracy:0.9627 Error: 0.06737 Loss:0.07263 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.68s\n", - "2415232 Examples seen. Accuracy:0.9635 Error: 0.13535 Loss:0.12909 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.65s\n", - "2415872 Examples seen. Accuracy:0.9645 Error: 0.06331 Loss:0.05422 Threads: 8 Forward time: 4.99s Backward time: 3.22s Step time: 3.66s\n", - "2416512 Examples seen. Accuracy:0.9638 Error: 0.11218 Loss:0.10808 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.66s\n", - "2417152 Examples seen. Accuracy:0.9625 Error: 0.06961 Loss:0.04812 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.66s\n", - "2417792 Examples seen. Accuracy:0.9618 Error: 0.09375 Loss:0.10179 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.66s\n", - "2418432 Examples seen. Accuracy:0.9613 Error: 0.06926 Loss:0.11880 Threads: 8 Forward time: 4.96s Backward time: 3.21s Step time: 3.65s\n", - "2419072 Examples seen. Accuracy:0.9620 Error: 0.15010 Loss:0.15108 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.66s\n", - "2419712 Examples seen. Accuracy:0.9622 Error: 0.09841 Loss:0.16576 Threads: 8 Forward time: 4.99s Backward time: 3.19s Step time: 3.67s\n", - "2420352 Examples seen. 
Accuracy:0.9617 Error: 0.09181 Loss:0.08123 Threads: 8 Forward time: 5.06s Backward time: 3.23s Step time: 3.68s\n", - "2420992 Examples seen. Accuracy:0.9609 Error: 0.07757 Loss:0.06275 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.67s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2421632 Examples seen. Accuracy:0.9611 Error: 0.05869 Loss:0.04683 Threads: 8 Forward time: 4.88s Backward time: 3.19s Step time: 3.65s\n", - "2422272 Examples seen. Accuracy:0.9611 Error: 0.12574 Loss:0.13354 Threads: 8 Forward time: 5.06s Backward time: 3.21s Step time: 3.64s\n", - "2422912 Examples seen. Accuracy:0.9607 Error: 0.07887 Loss:0.05618 Threads: 8 Forward time: 5.10s Backward time: 3.24s Step time: 3.69s\n", - "2423552 Examples seen. Accuracy:0.9612 Error: 0.03758 Loss:0.03187 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.66s\n", - "2424192 Examples seen. Accuracy:0.9606 Error: 0.04157 Loss:0.04722 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.65s\n", - "2424832 Examples seen. Accuracy:0.9604 Error: 0.08743 Loss:0.05742 Threads: 8 Forward time: 4.93s Backward time: 3.23s Step time: 3.66s\n", - "2425472 Examples seen. Accuracy:0.9604 Error: 0.10177 Loss:0.18647 Threads: 8 Forward time: 4.93s Backward time: 3.21s Step time: 3.65s\n", - "2426112 Examples seen. Accuracy:0.9612 Error: 0.14764 Loss:0.13367 Threads: 8 Forward time: 4.98s Backward time: 3.23s Step time: 3.67s\n", - "2426752 Examples seen. Accuracy:0.9621 Error: 0.02184 Loss:0.01407 Threads: 8 Forward time: 5.04s Backward time: 3.26s Step time: 3.68s\n", - "2427392 Examples seen. Accuracy:0.9618 Error: 0.07716 Loss:0.07552 Threads: 8 Forward time: 5.09s Backward time: 3.30s Step time: 3.76s\n", - "2428032 Examples seen. Accuracy:0.9612 Error: 0.10830 Loss:0.10426 Threads: 8 Forward time: 5.03s Backward time: 3.23s Step time: 3.70s\n", - "2428672 Examples seen. 
Accuracy:0.9618 Error: 0.08810 Loss:0.06320 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.65s\n", - "2429312 Examples seen. Accuracy:0.9612 Error: 0.09988 Loss:0.11013 Threads: 8 Forward time: 5.01s Backward time: 3.28s Step time: 3.71s\n", - "2429952 Examples seen. Accuracy:0.9618 Error: 0.10986 Loss:0.19201 Threads: 8 Forward time: 5.05s Backward time: 3.28s Step time: 3.66s\n", - "2430592 Examples seen. Accuracy:0.9623 Error: 0.04762 Loss:0.02820 Threads: 8 Forward time: 5.02s Backward time: 3.27s Step time: 3.76s\n", - "2431232 Examples seen. Accuracy:0.9625 Error: 0.06206 Loss:0.03979 Threads: 8 Forward time: 5.01s Backward time: 3.23s Step time: 3.72s\n", - "2431872 Examples seen. Accuracy:0.9633 Error: 0.02842 Loss:0.01543 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.71s\n", - "2432512 Examples seen. Accuracy:0.9639 Error: 0.16059 Loss:0.18375 Threads: 8 Forward time: 5.00s Backward time: 3.27s Step time: 3.71s\n", - "2433152 Examples seen. Accuracy:0.9640 Error: 0.07255 Loss:0.08636 Threads: 8 Forward time: 4.97s Backward time: 3.24s Step time: 3.70s\n", - "2433792 Examples seen. Accuracy:0.9650 Error: 0.05588 Loss:0.05599 Threads: 8 Forward time: 5.00s Backward time: 3.23s Step time: 3.73s\n", - "2434432 Examples seen. Accuracy:0.9646 Error: 0.09000 Loss:0.07720 Threads: 8 Forward time: 4.94s Backward time: 3.22s Step time: 3.73s\n", - "2435072 Examples seen. Accuracy:0.9645 Error: 0.09038 Loss:0.09173 Threads: 8 Forward time: 5.10s Backward time: 3.31s Step time: 3.72s\n", - "2435712 Examples seen. Accuracy:0.9643 Error: 0.02760 Loss:0.01909 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.80s\n", - "2436352 Examples seen. Accuracy:0.9663 Error: 0.07000 Loss:0.04519 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.69s\n", - "2436992 Examples seen. 
Accuracy:0.9662 Error: 0.19523 Loss:0.20279 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.65s\n", - "2437632 Examples seen. Accuracy:0.9672 Error: 0.06155 Loss:0.06979 Threads: 8 Forward time: 5.08s Backward time: 3.28s Step time: 3.72s\n", - "2438272 Examples seen. Accuracy:0.9667 Error: 0.08146 Loss:0.06760 Threads: 8 Forward time: 5.07s Backward time: 3.27s Step time: 3.72s\n", - "2438912 Examples seen. Accuracy:0.9666 Error: 0.12747 Loss:0.17507 Threads: 8 Forward time: 4.93s Backward time: 3.26s Step time: 3.64s\n", - "2439552 Examples seen. Accuracy:0.9673 Error: 0.09810 Loss:0.07829 Threads: 8 Forward time: 4.90s Backward time: 3.24s Step time: 3.67s\n", - "2440192 Examples seen. Accuracy:0.9675 Error: 0.06045 Loss:0.03975 Threads: 8 Forward time: 4.95s Backward time: 3.20s Step time: 3.68s\n", - "2440832 Examples seen. Accuracy:0.9674 Error: 0.10832 Loss:0.08924 Threads: 8 Forward time: 5.10s Backward time: 3.33s Step time: 3.68s\n", - "2441472 Examples seen. Accuracy:0.9668 Error: 0.06639 Loss:0.05132 Threads: 8 Forward time: 4.94s Backward time: 3.24s Step time: 3.72s\n", - "2442112 Examples seen. Accuracy:0.9669 Error: 0.11082 Loss:0.10048 Threads: 8 Forward time: 5.01s Backward time: 3.26s Step time: 3.71s\n", - "2442752 Examples seen. Accuracy:0.9652 Error: 0.18231 Loss:0.19762 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.66s\n", - "2443392 Examples seen. Accuracy:0.9647 Error: 0.12482 Loss:0.08833 Threads: 8 Forward time: 5.07s Backward time: 3.22s Step time: 3.66s\n", - "2444032 Examples seen. Accuracy:0.9642 Error: 0.13215 Loss:0.09087 Threads: 8 Forward time: 5.06s Backward time: 3.22s Step time: 3.68s\n", - "2444672 Examples seen. 
Accuracy:0.9641 Error: 0.09566 Loss:0.07425 Threads: 8 Forward time: 5.01s Backward time: 3.25s Step time: 3.67s\n", - "Starting Validation.\n", - "Epochs: 49 Examples seen:2445296 Validation Accuracy: 0.9848 Validation Error: 0.0397 Validation Loss: 0.0398 Total time: 264.13min\n", - "Epoch time: 4.8 minutes. 100 epochs: 8 hours.\n", - "Epochs: 49. Working time: 4.4 hours.\n", - "2445936 Examples seen. Accuracy:0.9647 Error: 0.09779 Loss:0.10613 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.71s\n", - "2446576 Examples seen. Accuracy:0.9645 Error: 0.09939 Loss:0.10191 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.72s\n", - "2447216 Examples seen. Accuracy:0.9651 Error: 0.08636 Loss:0.06837 Threads: 8 Forward time: 5.03s Backward time: 3.20s Step time: 3.72s\n", - "2447856 Examples seen. Accuracy:0.9639 Error: 0.11687 Loss:0.11101 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.72s\n", - "2448496 Examples seen. Accuracy:0.9633 Error: 0.07291 Loss:0.05694 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.74s\n", - "2449136 Examples seen. Accuracy:0.9633 Error: 0.10625 Loss:0.12971 Threads: 8 Forward time: 5.04s Backward time: 3.24s Step time: 3.72s\n", - "2449776 Examples seen. Accuracy:0.9636 Error: 0.08616 Loss:0.07411 Threads: 8 Forward time: 4.96s Backward time: 3.24s Step time: 3.73s\n", - "2450416 Examples seen. Accuracy:0.9631 Error: 0.11457 Loss:0.08526 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.71s\n", - "2451056 Examples seen. Accuracy:0.9623 Error: 0.09590 Loss:0.10775 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.74s\n", - "2451696 Examples seen. Accuracy:0.9613 Error: 0.14777 Loss:0.12411 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.74s\n", - "2452336 Examples seen. Accuracy:0.9612 Error: 0.06299 Loss:0.06043 Threads: 8 Forward time: 4.89s Backward time: 3.22s Step time: 3.70s\n", - "2452976 Examples seen. 
Accuracy:0.9609 Error: 0.09018 Loss:0.09205 Threads: 8 Forward time: 5.03s Backward time: 3.24s Step time: 3.69s\n", - "2453616 Examples seen. Accuracy:0.9597 Error: 0.13754 Loss:0.21120 Threads: 8 Forward time: 4.97s Backward time: 3.21s Step time: 3.72s\n", - "2454256 Examples seen. Accuracy:0.9599 Error: 0.14907 Loss:0.12133 Threads: 8 Forward time: 4.95s Backward time: 3.23s Step time: 3.71s\n", - "2454896 Examples seen. Accuracy:0.9604 Error: 0.03802 Loss:0.02116 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.68s\n", - "2455536 Examples seen. Accuracy:0.9612 Error: 0.20154 Loss:0.22435 Threads: 8 Forward time: 5.00s Backward time: 3.26s Step time: 3.70s\n", - "2456176 Examples seen. Accuracy:0.9609 Error: 0.10072 Loss:0.12594 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.73s\n", - "2456816 Examples seen. Accuracy:0.9600 Error: 0.09720 Loss:0.08089 Threads: 8 Forward time: 4.97s Backward time: 3.20s Step time: 3.68s\n", - "2457456 Examples seen. Accuracy:0.9604 Error: 0.14106 Loss:0.11614 Threads: 8 Forward time: 4.95s Backward time: 3.22s Step time: 3.68s\n", - "2458096 Examples seen. Accuracy:0.9611 Error: 0.05850 Loss:0.04063 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.71s\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "2458736 Examples seen. Accuracy:0.9610 Error: 0.10419 Loss:0.10172 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.73s\n", - "2459376 Examples seen. Accuracy:0.9600 Error: 0.06087 Loss:0.05568 Threads: 8 Forward time: 4.91s Backward time: 3.23s Step time: 3.64s\n", - "2460016 Examples seen. Accuracy:0.9606 Error: 0.15841 Loss:0.14317 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.72s\n", - "2460656 Examples seen. Accuracy:0.9603 Error: 0.06821 Loss:0.06307 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.65s\n", - "2461296 Examples seen. 
Accuracy:0.9601 Error: 0.08654 Loss:0.09037 Threads: 8 Forward time: 5.03s Backward time: 3.26s Step time: 3.76s\n", - "2461936 Examples seen. Accuracy:0.9590 Error: 0.07632 Loss:0.06424 Threads: 8 Forward time: 5.06s Backward time: 3.24s Step time: 3.68s\n", - "2462576 Examples seen. Accuracy:0.9589 Error: 0.09366 Loss:0.15949 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.68s\n", - "2463216 Examples seen. Accuracy:0.9593 Error: 0.12086 Loss:0.12483 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.66s\n", - "2463856 Examples seen. Accuracy:0.9597 Error: 0.12919 Loss:0.20393 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.67s\n", - "2464496 Examples seen. Accuracy:0.9604 Error: 0.05858 Loss:0.03614 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.66s\n", - "2465136 Examples seen. Accuracy:0.9599 Error: 0.01954 Loss:0.01035 Threads: 8 Forward time: 4.92s Backward time: 3.23s Step time: 3.66s\n", - "2465776 Examples seen. Accuracy:0.9612 Error: 0.07069 Loss:0.05159 Threads: 8 Forward time: 5.01s Backward time: 3.21s Step time: 3.64s\n", - "2466416 Examples seen. Accuracy:0.9619 Error: 0.14101 Loss:0.18235 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.68s\n", - "2467056 Examples seen. Accuracy:0.9627 Error: 0.02598 Loss:0.01503 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.71s\n", - "2467696 Examples seen. Accuracy:0.9643 Error: 0.04241 Loss:0.04202 Threads: 8 Forward time: 4.98s Backward time: 3.25s Step time: 3.68s\n", - "2468336 Examples seen. Accuracy:0.9642 Error: 0.08662 Loss:0.05508 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.72s\n", - "2468976 Examples seen. Accuracy:0.9632 Error: 0.11560 Loss:0.09782 Threads: 8 Forward time: 4.96s Backward time: 3.20s Step time: 3.70s\n", - "2469616 Examples seen. 
Accuracy:0.9637 Error: 0.08343 Loss:0.07203 Threads: 8 Forward time: 5.03s Backward time: 3.22s Step time: 3.69s\n", - "2470256 Examples seen. Accuracy:0.9638 Error: 0.05606 Loss:0.03345 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.73s\n", - "2470896 Examples seen. Accuracy:0.9634 Error: 0.09244 Loss:0.12337 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.70s\n", - "2471536 Examples seen. Accuracy:0.9642 Error: 0.06850 Loss:0.05097 Threads: 8 Forward time: 5.01s Backward time: 3.17s Step time: 3.67s\n", - "2472176 Examples seen. Accuracy:0.9642 Error: 0.12828 Loss:0.28280 Threads: 8 Forward time: 5.11s Backward time: 3.27s Step time: 3.75s\n", - "2472816 Examples seen. Accuracy:0.9636 Error: 0.06643 Loss:0.06648 Threads: 8 Forward time: 5.05s Backward time: 3.28s Step time: 3.73s\n", - "2473456 Examples seen. Accuracy:0.9632 Error: 0.09598 Loss:0.07311 Threads: 8 Forward time: 4.99s Backward time: 3.23s Step time: 3.72s\n", - "2474096 Examples seen. Accuracy:0.9626 Error: 0.07572 Loss:0.06650 Threads: 8 Forward time: 4.97s Backward time: 3.22s Step time: 3.76s\n", - "2474736 Examples seen. Accuracy:0.9624 Error: 0.16787 Loss:0.20741 Threads: 8 Forward time: 5.01s Backward time: 3.22s Step time: 3.65s\n", - "2475376 Examples seen. Accuracy:0.9635 Error: 0.06696 Loss:0.06897 Threads: 8 Forward time: 4.98s Backward time: 3.22s Step time: 3.64s\n", - "2476016 Examples seen. Accuracy:0.9635 Error: 0.09461 Loss:0.07941 Threads: 8 Forward time: 5.06s Backward time: 3.27s Step time: 3.70s\n", - "2476656 Examples seen. Accuracy:0.9638 Error: 0.11986 Loss:0.11124 Threads: 8 Forward time: 5.02s Backward time: 3.23s Step time: 3.68s\n", - "2477296 Examples seen. Accuracy:0.9639 Error: 0.12191 Loss:0.15176 Threads: 8 Forward time: 5.01s Backward time: 3.24s Step time: 3.73s\n", - "2477936 Examples seen. 
Accuracy:0.9641 Error: 0.07233 Loss:0.05448 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.61s\n", - "2478576 Examples seen. Accuracy:0.9644 Error: 0.09154 Loss:0.11773 Threads: 8 Forward time: 5.02s Backward time: 3.25s Step time: 3.61s\n", - "2479216 Examples seen. Accuracy:0.9647 Error: 0.04387 Loss:0.02665 Threads: 8 Forward time: 4.99s Backward time: 3.25s Step time: 3.65s\n", - "2479856 Examples seen. Accuracy:0.9641 Error: 0.08265 Loss:0.05985 Threads: 8 Forward time: 4.97s Backward time: 3.23s Step time: 3.63s\n", - "2480496 Examples seen. Accuracy:0.9630 Error: 0.04707 Loss:0.02981 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.62s\n", - "2481136 Examples seen. Accuracy:0.9628 Error: 0.10169 Loss:0.07258 Threads: 8 Forward time: 4.97s Backward time: 3.25s Step time: 3.61s\n", - "2481776 Examples seen. Accuracy:0.9626 Error: 0.03753 Loss:0.02160 Threads: 8 Forward time: 4.97s Backward time: 3.26s Step time: 3.62s\n", - "2482416 Examples seen. Accuracy:0.9629 Error: 0.07974 Loss:0.06922 Threads: 8 Forward time: 4.97s Backward time: 3.27s Step time: 3.63s\n", - "2483056 Examples seen. Accuracy:0.9627 Error: 0.09616 Loss:0.07745 Threads: 8 Forward time: 5.07s Backward time: 3.29s Step time: 3.77s\n", - "2483696 Examples seen. Accuracy:0.9619 Error: 0.04000 Loss:0.02443 Threads: 8 Forward time: 5.04s Backward time: 3.28s Step time: 3.68s\n", - "2484336 Examples seen. Accuracy:0.9624 Error: 0.10765 Loss:0.08541 Threads: 8 Forward time: 5.26s Backward time: 3.35s Step time: 3.79s\n", - "2484976 Examples seen. Accuracy:0.9607 Error: 0.15562 Loss:0.11084 Threads: 8 Forward time: 5.14s Backward time: 3.28s Step time: 3.73s\n", - "2485616 Examples seen. Accuracy:0.9610 Error: 0.07639 Loss:0.05440 Threads: 8 Forward time: 4.99s Backward time: 3.26s Step time: 3.74s\n", - "2486256 Examples seen. 
Accuracy:0.9622 Error: 0.09286 Loss:0.06327 Threads: 8 Forward time: 4.98s Backward time: 3.24s Step time: 3.64s\n", - "2486896 Examples seen. Accuracy:0.9621 Error: 0.05640 Loss:0.03882 Threads: 8 Forward time: 5.02s Backward time: 3.24s Step time: 3.65s\n", - "2487536 Examples seen. Accuracy:0.9634 Error: 0.03119 Loss:0.01744 Threads: 8 Forward time: 5.05s Backward time: 3.25s Step time: 3.64s\n", - "2488176 Examples seen. Accuracy:0.9627 Error: 0.08898 Loss:0.07472 Threads: 8 Forward time: 4.95s Backward time: 3.24s Step time: 3.61s\n", - "2488816 Examples seen. Accuracy:0.9642 Error: 0.02770 Loss:0.01556 Threads: 8 Forward time: 5.04s Backward time: 3.25s Step time: 3.62s\n", - "2489456 Examples seen. Accuracy:0.9641 Error: 0.09492 Loss:0.06707 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.63s\n", - "2490096 Examples seen. Accuracy:0.9653 Error: 0.03905 Loss:0.02173 Threads: 8 Forward time: 4.99s Backward time: 3.20s Step time: 3.64s\n", - "2490736 Examples seen. Accuracy:0.9659 Error: 0.05739 Loss:0.03664 Threads: 8 Forward time: 5.00s Backward time: 3.22s Step time: 3.69s\n", - "2491376 Examples seen. Accuracy:0.9668 Error: 0.04905 Loss:0.03240 Threads: 8 Forward time: 5.03s Backward time: 3.25s Step time: 3.63s\n", - "2492016 Examples seen. Accuracy:0.9659 Error: 0.04817 Loss:0.02833 Threads: 8 Forward time: 4.93s Backward time: 3.22s Step time: 3.62s\n", - "2492656 Examples seen. Accuracy:0.9655 Error: 0.13183 Loss:0.18951 Threads: 8 Forward time: 5.00s Backward time: 3.24s Step time: 3.63s\n", - "2493296 Examples seen. Accuracy:0.9649 Error: 0.15715 Loss:0.15009 Threads: 8 Forward time: 4.95s Backward time: 3.21s Step time: 3.61s\n", - "2493936 Examples seen. Accuracy:0.9644 Error: 0.16351 Loss:0.17889 Threads: 8 Forward time: 4.96s Backward time: 3.22s Step time: 3.65s\n", - "2494576 Examples seen. 
Accuracy:0.9644 Error: 0.09626 Loss:0.06891 Threads: 8 Forward time: 5.00s Backward time: 3.21s Step time: 3.64s\n", - "Starting Validation.\n", - "Epochs: 50 Examples seen:2495200 Validation Accuracy: 0.9855 Validation Error: 0.0393 Validation Loss: 0.0392 Total time: 269.39min\n", - "Starting Testing.\n" - ] - }, - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Epochs: 50 Examples seen:2495200 Test Accuracy: 0.9895 Test Error: 0.0335 Test Loss: 0.0307 Total time: 269.87min\n", - "Epoch time: 4.7 minutes. 100 epochs: 7.9 hours.\n", - "Epochs: 50. Working time: 4.5 hours.\n", - "Finished.\n" - ] - } - ], - "source": [ - "if os.path.isdir('plant'):\n", - " print(\"RUNNING: SimplePlantLeafDisease\")\n", - " !neural-api/bin/x86_64-linux/bin/SimplePlantLeafDisease" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "Python 3", - "language": "python", - "name": "python3" }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.6.9" - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file diff --git a/examples/StringManipulation/README.md b/examples/StringManipulation/README.md new file mode 100644 index 00000000..b26b56bb --- /dev/null +++ b/examples/StringManipulation/README.md @@ -0,0 +1,63 @@ +# Predicting the next character in a string + +In this source code example, to make things very simple, the dataset has 3 strings: +``` + FDataset[0] := 'happy good morning.'+chr(1); + FDataset[1] := 'fantastic good evening.'+chr(1); + FDataset[2] := 'superb good night.'+chr(1); +``` + +The neural network is built with: + +``` + FNN.AddLayer([ + TNNetInput.Create(csContextLen, 1, 
csVocabSize), + TNNetPointwiseConvReLU.Create(32), + TNNetFullConnectReLU.Create(32), + TNNetFullConnectReLU.Create(32), + TNNetFullConnectLinear.Create(csVocabSize), + TNNetSoftMax.Create() + ]); +``` + +The constants above are defined with: +``` +const + csContextLen = 64; // The input string can have up to 64 characters. + csVocabSize = 128; // Character based vocabulary/dictionary. + csMinSampleSize = 3; // Minimum of 3 characters. +``` + +After training the NN, we test with: + +``` + WriteLn('Testing.'); + WriteLn(GenerateStringFromChars(FNN, 'happy')); + WriteLn(GenerateStringFromChars(FNN, 'fantastic')); + WriteLn(GenerateStringFromChars(FNN, 'superb')); +``` + +Then NN gets it 100% right with the following output: +``` +happy good morning. +fantastic good evening. +superb good night. +``` + +## How does it work? +Each character is encoded into a number from 0 to 127. This number from 0 to 127 is then transformed into a vector with 128 elements where only one element values 1. This is called “one-hot encoding” (https://en.wikipedia.org/wiki/One-hot). +This is why the input is defined with (csContextLen, 1, csVocabSize). Then, to decrease the dimensionality from 128 dimensions in the vector, a pointwise convolution is used (TNNetPointwiseConvReLU). +Each input character is converted into an 128 elements vector in reverse order so “good” will become “doog”. The last layer of the NN has 128 elements so the element with highest value (or probability) is the next predicted character. + +The one-hot encoding is done with one API call: +``` +procedure TVolume.OneHotEncodingReversed(aTokens: string); overload; +``` + +In the case that your input is not a string, you could call the following instead: + +``` +procedure TVolume.OneHotEncoding(aTokens: array of integer); overload; +``` + +So, for each character predicted, this character is added to the input string and the process is repeated until a termination token is found (chr(0) or chr(1)). 
This is how calling GenerateStringFromChars(FNN, 'superb') outputs ‘superb good night’. diff --git a/examples/StringManipulation/StringManipulation.lpi b/examples/StringManipulation/StringManipulation.lpi new file mode 100644 index 00000000..b6a21be7 --- /dev/null +++ b/examples/StringManipulation/StringManipulation.lpi @@ -0,0 +1,169 @@ +<?xml version="1.0" encoding="UTF-8"?> +<CONFIG> + <ProjectOptions> + <Version Value="12"/> + <PathDelim Value="\"/> + <General> + <Flags> + <MainUnitHasCreateFormStatements Value="False"/> + <MainUnitHasTitleStatement Value="False"/> + <MainUnitHasScaledStatement Value="False"/> + <CompatibilityMode Value="True"/> + </Flags> + <SessionStorage Value="InProjectDir"/> + <Title Value="Simple String Manipulation"/> + <UseAppBundle Value="False"/> + <ResourceType Value="res"/> + </General> + <i18n> + <EnableI18N LFM="False"/> + </i18n> + <BuildModes Count="3"> + <Item1 Name="Default" Default="True"/> + <Item2 Name="Debug"> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\StringManipulation"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\..\examples\neural;$(ProjOutDir)"/> + <OtherUnitFiles Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <Parsing> + <SyntaxOptions> + <IncludeAssertionCode Value="True"/> + </SyntaxOptions> + </Parsing> + <CodeGeneration> + <Checks> + <IOChecks Value="True"/> + <RangeChecks Value="True"/> + <OverflowChecks Value="True"/> + <StackChecks Value="True"/> + </Checks> + <VerifyObjMethodCallValidity Value="True"/> + </CodeGeneration> + <Linking> + <Debugging> + <DebugInfoType Value="dsDwarf2Set"/> + <UseValgrind Value="True"/> + <UseExternalDbgSyms Value="True"/> + </Debugging> + </Linking> + <Other> + <CustomOptions Value="-dDebug 
+-dAVX"/> + <OtherDefines Count="2"> + <Define0 Value="Debug"/> + <Define1 Value="AVX"/> + </OtherDefines> + </Other> + </CompilerOptions> + </Item2> + <Item3 Name="Release"> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\StringManipulation"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\..\neural;$(ProjOutDir)"/> + <OtherUnitFiles Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <CodeGeneration> + <SmartLinkUnit Value="True"/> + <Optimizations> + <OptimizationLevel Value="3"/> + </Optimizations> + </CodeGeneration> + <Linking> + <Debugging> + <GenerateDebugInfo Value="False"/> + <DebugInfoType Value="dsDwarf2Set"/> + </Debugging> + <LinkSmart Value="True"/> + </Linking> + <Other> + <CustomOptions Value="-dRelease +-dAVX"/> + <OtherDefines Count="5"> + <Define0 Value="Release"/> + <Define1 Value="Debug"/> + <Define2 Value="CheckRange"/> + <Define3 Value="AVX2"/> + <Define4 Value="AVX"/> + </OtherDefines> + </Other> + </CompilerOptions> + </Item3> + </BuildModes> + <PublishOptions> + <Version Value="2"/> + </PublishOptions> + <RunParams> + <FormatVersion Value="2"/> + <Modes Count="1"> + <Mode0 Name="default"/> + </Modes> + </RunParams> + <RequiredPackages Count="1"> + <Item1> + <PackageName Value="multithreadprocslaz"/> + </Item1> + </RequiredPackages> + <Units Count="1"> + <Unit0> + <Filename Value="StringManipulation.lpr"/> + <IsPartOfProject Value="True"/> + </Unit0> + </Units> + </ProjectOptions> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\StringManipulation"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\..\experiments\neural;$(ProjOutDir)"/> + <OtherUnitFiles 
Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <CodeGeneration> + <Optimizations> + <OptimizationLevel Value="3"/> + </Optimizations> + </CodeGeneration> + <Linking> + <Debugging> + <GenerateDebugInfo Value="False"/> + </Debugging> + </Linking> + <Other> + <CustomOptions Value="-dAVX +-dRelease"/> + <OtherDefines Count="2"> + <Define0 Value="AVX"/> + <Define1 Value="Release"/> + </OtherDefines> + </Other> + </CompilerOptions> + <Debugging> + <Exceptions Count="3"> + <Item1> + <Name Value="EAbort"/> + </Item1> + <Item2> + <Name Value="ECodetoolError"/> + </Item2> + <Item3> + <Name Value="EFOpenError"/> + </Item3> + </Exceptions> + </Debugging> +</CONFIG> diff --git a/examples/StringManipulation/StringManipulation.lpr b/examples/StringManipulation/StringManipulation.lpr new file mode 100644 index 00000000..99f39c2c --- /dev/null +++ b/examples/StringManipulation/StringManipulation.lpr @@ -0,0 +1,158 @@ +program StringManipulation; +(* +HypotenuseFitLoading: learns how to calculate hypotenuse sqrt(X^2 + Y^2). +Copyright (C) 2023 Joao Paulo Schwarz Schuler + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; either version 2 of the License, or +any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along +with this program; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+*) + +{$mode objfpc}{$H+} + +uses {$IFDEF UNIX} {$IFDEF UseCThreads} + cthreads, {$ENDIF} {$ENDIF} + Classes, + neuralnetwork, + neuralvolume, + neuralfit, + CustApp, + Math; + +const + csContextLen = 64; + csVocabSize = 128; // Character based vocabulary/dictionary. + csMinSampleSize = 3; // Minimum of 3 characters. + +type + + { TStringManipulation } + + TStringManipulation = class(TCustomApplication) + protected + FDataset: array of string; + FDatasetSize: integer; + FNN: TNNet; + procedure LoadDataset; + procedure DoRun; override; + public + procedure GetTrainingPair(Idx: integer; ThreadId: integer; pInput, pOutput: TNNetVolume); + procedure GetValidationPair(Idx: integer; ThreadId: integer; pInput, pOutput: TNNetVolume); + procedure GetTestPair(Idx: integer; ThreadId: integer; pInput, pOutput: TNNetVolume); + end; + + procedure TStringManipulation.LoadDataset; + begin + SetLength(FDataset, 3); + FDataset[0] := 'happy good morning.'+chr(1); + FDataset[1] := 'fantastic good evening.'+chr(1); + FDataset[2] := 'superb good night.'+chr(1); + FDatasetSize := Length(FDataset); + end; + + procedure TStringManipulation.DoRun; + var + NFit: TNeuralDataLoadingFit; + begin + LoadDataset(); + FNN := TNNet.Create(); + NFit := TNeuralDataLoadingFit.Create(); + + FNN.AddLayer([ + TNNetInput.Create(csContextLen, 1, csVocabSize), + TNNetPointwiseConvReLU.Create(32), + TNNetFullConnectReLU.Create(32), + TNNetFullConnectReLU.Create(32), + TNNetFullConnectLinear.Create(csVocabSize), + TNNetSoftMax.Create() + ]); + FNN.DebugStructure; + + WriteLn('Computing...'); + //NFit.MaxThreadNum := 1; + NFit.InitialLearningRate := 0.001; + NFit.LearningRateDecay := 0; + NFit.L2Decay := 0; + NFit.LogEveryBatches := 100; + NFit.EnableClassComparison(); + NFit.EnableDefaultLoss(); + NFit.AvgWeightEpochCount := 1; + NFit.FitLoading( + FNN, + {TrainingVolumesCount=}10000, + {ValidationVolumesCount=}1000, + {TestVolumesCount=}1000, + {batchsize=}32, + {epochs=}50, + @GetTrainingPair, 
@GetValidationPair, @GetTestPair + ); + FNN.DebugWeights(); + WriteLn('Testing.'); + + WriteLn(GenerateStringFromChars(FNN, 'happy')); + WriteLn(GenerateStringFromChars(FNN, 'fantastic')); + WriteLn(GenerateStringFromChars(FNN, 'superb')); + + NFit.Free; + FNN.Free; + Write('Press ENTER to exit.'); + ReadLn; + Terminate; + end; + + procedure TStringManipulation.GetTrainingPair(Idx: integer; ThreadId: integer; + pInput, pOutput: TNNetVolume); + var + SampleId: integer; + SampleLen: integer; + SampleCutPosition: integer; + ExpectedTokenChar: char; + ExpectedTokenInt: integer; + begin + // Make sure that expected input and output have the proper sizes. + if FNN.GetFirstLayer().Output.Size <> pInput.Size then pInput.ReSize(FNN.GetFirstLayer().Output); + if FNN.GetLastLayer().Output.Size <> pOutput.Size then pOutput.ReSize(FNN.GetLastLayer().Output); + // Get the input sample + SampleId := Random(FDatasetSize); + SampleLen := Length(FDataset[SampleId]); + SampleCutPosition := Random(SampleLen-csMinSampleSize)+csMinSampleSize; // -1 + // The expected token is the next character in the string + ExpectedTokenChar := FDataset[SampleId][SampleCutPosition+1]; + ExpectedTokenInt := Min(Ord(ExpectedTokenChar),pInput.Depth-1); + // Encode the input and output volumes + pInput.OneHotEncodingReversed(copy(FDataset[SampleId], 1, SampleCutPosition)); + pOutput.SetClassForSoftMax(ExpectedTokenInt); + pOutput.Tag := ExpectedTokenInt; + end; + + procedure TStringManipulation.GetValidationPair(Idx: integer; ThreadId: integer; + pInput, pOutput: TNNetVolume); + begin + GetTrainingPair(Idx, ThreadId, pInput, pOutput); + end; + + procedure TStringManipulation.GetTestPair(Idx: integer; ThreadId: integer; + pInput, pOutput: TNNetVolume); + begin + GetTrainingPair(Idx, ThreadId, pInput, pOutput); + end; + +var + Application: TStringManipulation; +begin + Application := TStringManipulation.Create(nil); + Application.Title:='Simple String Manipulation'; + Application.Run; + Application.Free; 
+end. + diff --git a/examples/SuperResolution/README.md b/examples/SuperResolution/README.md index e9aa6340..e2be0637 100644 --- a/examples/SuperResolution/README.md +++ b/examples/SuperResolution/README.md @@ -1,8 +1,8 @@ -# Simple Super Resolution Example +# Super Resolution Command Line Tool <img align="right" src="results/street_result.png"></img> ## Introduction The image at the right side shows an example. The smaller image is the original image while the bigger image is the image processed twice by a neural network trained to increase image resolution. -This example has been created via the **SuperResolution.lpi** command line tool with: +This example has been created via the **SuperResolution.lpi** command line tool. The parameter `-i` defines the input file while `-o` defines the output file: ``` #SuperResolution -i street.png -o street2.png diff --git a/examples/VisualAutoencoder/uvisualautoencodertinyimagenet.lfm b/examples/VisualAutoencoder/uvisualautoencodertinyimagenet.lfm index 9bad9f7c..7bd19b97 100644 --- a/examples/VisualAutoencoder/uvisualautoencodertinyimagenet.lfm +++ b/examples/VisualAutoencoder/uvisualautoencodertinyimagenet.lfm @@ -11,7 +11,7 @@ object FormVisualLearning: TFormVisualLearning OnCreate = FormCreate OnDestroy = FormDestroy Position = poScreenCenter - LCLVersion = '2.0.2.0' + LCLVersion = '2.0.12.0' object ButLearn: TButton Left = 768 Height = 45 diff --git a/examples/sentimentAnalysis/sentimentAnalysis.lpi b/examples/sentimentAnalysis/sentimentAnalysis.lpi new file mode 100644 index 00000000..ce8ea96d --- /dev/null +++ b/examples/sentimentAnalysis/sentimentAnalysis.lpi @@ -0,0 +1,169 @@ +<?xml version="1.0" encoding="UTF-8"?> +<CONFIG> + <ProjectOptions> + <Version Value="12"/> + <PathDelim Value="\"/> + <General> + <Flags> + <MainUnitHasCreateFormStatements Value="False"/> + <MainUnitHasTitleStatement Value="False"/> + <MainUnitHasScaledStatement Value="False"/> + <CompatibilityMode Value="True"/> + </Flags> + <SessionStorage 
Value="InProjectDir"/> + <Title Value="Sentiment Analysis"/> + <UseAppBundle Value="False"/> + <ResourceType Value="res"/> + </General> + <i18n> + <EnableI18N LFM="False"/> + </i18n> + <BuildModes Count="3"> + <Item1 Name="Default" Default="True"/> + <Item2 Name="Debug"> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\sentimentAnalysis"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\..\examples\neural;$(ProjOutDir)"/> + <OtherUnitFiles Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <Parsing> + <SyntaxOptions> + <IncludeAssertionCode Value="True"/> + </SyntaxOptions> + </Parsing> + <CodeGeneration> + <Checks> + <IOChecks Value="True"/> + <RangeChecks Value="True"/> + <OverflowChecks Value="True"/> + <StackChecks Value="True"/> + </Checks> + <VerifyObjMethodCallValidity Value="True"/> + </CodeGeneration> + <Linking> + <Debugging> + <DebugInfoType Value="dsDwarf2Set"/> + <UseValgrind Value="True"/> + <UseExternalDbgSyms Value="True"/> + </Debugging> + </Linking> + <Other> + <CustomOptions Value="-dDebug +-dAVX"/> + <OtherDefines Count="2"> + <Define0 Value="Debug"/> + <Define1 Value="AVX"/> + </OtherDefines> + </Other> + </CompilerOptions> + </Item2> + <Item3 Name="Release"> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\sentimentAnalysis"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\..\neural;$(ProjOutDir)"/> + <OtherUnitFiles Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <CodeGeneration> + <SmartLinkUnit 
Value="True"/> + <Optimizations> + <OptimizationLevel Value="3"/> + </Optimizations> + </CodeGeneration> + <Linking> + <Debugging> + <GenerateDebugInfo Value="False"/> + <DebugInfoType Value="dsDwarf2Set"/> + </Debugging> + <LinkSmart Value="True"/> + </Linking> + <Other> + <CustomOptions Value="-dRelease +-dAVX"/> + <OtherDefines Count="5"> + <Define0 Value="Release"/> + <Define1 Value="Debug"/> + <Define2 Value="CheckRange"/> + <Define3 Value="AVX2"/> + <Define4 Value="AVX"/> + </OtherDefines> + </Other> + </CompilerOptions> + </Item3> + </BuildModes> + <PublishOptions> + <Version Value="2"/> + </PublishOptions> + <RunParams> + <FormatVersion Value="2"/> + <Modes Count="1"> + <Mode0 Name="default"/> + </Modes> + </RunParams> + <RequiredPackages Count="1"> + <Item1> + <PackageName Value="multithreadprocslaz"/> + </Item1> + </RequiredPackages> + <Units Count="1"> + <Unit0> + <Filename Value="sentimentAnalysis.lpr"/> + <IsPartOfProject Value="True"/> + </Unit0> + </Units> + </ProjectOptions> + <CompilerOptions> + <Version Value="11"/> + <PathDelim Value="\"/> + <Target> + <Filename Value="..\..\bin\$(TargetCPU)-$(TargetOS)\bin\sentimentAnalysis"/> + </Target> + <SearchPaths> + <IncludeFiles Value="..\..\experiments\neural;$(ProjOutDir)"/> + <OtherUnitFiles Value="$(LazarusDir)\lcl\units\$(TargetCPU)-$(TargetOS);$(LazarusDir)\components\lazutils\lib\$(TargetCPU)-$(TargetOS);..\..\neural"/> + <UnitOutputDirectory Value="..\..\bin\$(TargetCPU)-$(TargetOS)\units"/> + </SearchPaths> + <CodeGeneration> + <Optimizations> + <OptimizationLevel Value="3"/> + </Optimizations> + </CodeGeneration> + <Linking> + <Debugging> + <GenerateDebugInfo Value="False"/> + </Debugging> + </Linking> + <Other> + <CustomOptions Value="-dAVX +-dRelease"/> + <OtherDefines Count="2"> + <Define0 Value="AVX"/> + <Define1 Value="Release"/> + </OtherDefines> + </Other> + </CompilerOptions> + <Debugging> + <Exceptions Count="3"> + <Item1> + <Name Value="EAbort"/> + </Item1> + <Item2> + <Name 
Value="ECodetoolError"/> + </Item2> + <Item3> + <Name Value="EFOpenError"/> + </Item3> + </Exceptions> + </Debugging> +</CONFIG> diff --git a/examples/sentimentAnalysis/sentimentAnalysis.lpr b/examples/sentimentAnalysis/sentimentAnalysis.lpr new file mode 100644 index 00000000..bce93a3d --- /dev/null +++ b/examples/sentimentAnalysis/sentimentAnalysis.lpr @@ -0,0 +1,284 @@ +program sentimentAnalysis; +(* +sentimentAnalysis: learns how the sentiment in the sst2 dataset: +https://huggingface.co/datasets/sst2 +Copyright (C) 2023 Joao Paulo Schwarz Schuler + +This program is free software; you can redistribute it and/or modify +it under the terms of the GNU General Public License as published by +the Free Software Foundation; either version 2 of the License, or +any later version. + +This program is distributed in the hope that it will be useful, +but WITHOUT ANY WARRANTY; without even the implied warranty of +MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along +with this program; if not, write to the Free Software Foundation, Inc., +51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. +*) + +{$mode objfpc}{$H+} + +uses {$IFDEF UNIX} {$IFDEF UseCThreads} + cthreads, {$ENDIF} {$ENDIF} + Classes, + neuralnetwork, + neuralvolume, + neuralfit, + neuralthread, + neuraldatasets, + CustApp, + Math, + SysUtils; + +const + csContextLen = 81; + csTrainingFileName = 'sst2_local/sst2_train.txt'; + csValidationFileName = 'sst2_local/sst2_validation.txt'; + csAutosavedFileName = 'sentiment.nn'; + csVocabSize = 128; // ASCII character based vocabulary/dictionary. + csMinSampleSize = 3; // Minimum of 3 characters. 
+ csClassToStr: array[0..1] of string = ('negative','positive'); + +type + + { TTestFitLoading } + + TTestFitLoading = class(TCustomApplication) + private + procedure TestFromFile; + protected + FDataset: TStringList; + FDatasetClasses: TIntegerList; + FDatasetSize: integer; + FDatasetValidation: TStringList; + FDatasetValidationClasses: TIntegerList; + FDatasetValidationSize: integer; + FNN: TNNet; + NFit: TNeuralDataLoadingFit; + procedure LoadDataset; + procedure DoRun; override; + procedure WriteClassFromChars(str: string); + public + procedure OnAfterEpoch(Sender: TObject); + procedure GetTrainingPair(Idx: integer; ThreadId: integer; pInput, pOutput: TNNetVolume); + procedure GetValidationPair(Idx: integer; ThreadId: integer; pInput, pOutput: TNNetVolume); + procedure GetTestPair(Idx: integer; ThreadId: integer; pInput, pOutput: TNNetVolume); + end; + +function LoadDatasetFromFile(filename: string; DsText: TStringList; DsClasses: TIntegerList): integer; +var + LargeFile: TextFile; + StrLine: string; + ClassChar: char; +begin + WriteLn('Loading: ',filename); + AssignFile(LargeFile, filename); + Reset(LargeFile); + while not Eof(LargeFile) do + begin + ReadLn(LargeFile, StrLine); + if Length(StrLine)>csMinSampleSize then + begin + StrLine := LowerCase(StrLine); + ClassChar := StrLine[1]; + if (ClassChar = '0') or (ClassChar = '1') then + begin + DsClasses.Add(StrToInt(ClassChar)); + DsText.Add(Copy(StrLine, 3, Length(StrLine) - 2)); + end; + end; + end; + CloseFile(LargeFile); + WriteLn('Loaded ',filename,' dataset with ', DsText.Count, ' rows'); + result := DsText.Count; +end; + + + procedure TTestFitLoading.LoadDataset; + var + I: integer; + begin + FDatasetSize := LoadDatasetFromFile(csTrainingFileName, FDataset, FDatasetClasses); + // creates a dataset validation removing elements from the training; + if FDatasetSize > 0 then + begin + for I := FDatasetSize-1 downto 0 do + begin + if (I mod 21 = 0) then + begin + FDatasetValidation.Add(FDataset[I]); + 
FDatasetValidationClasses.Add(FDatasetClasses[I]); + FDataset.Delete(I); + FDatasetClasses.Delete(I); + end; + end; + end; + FDatasetSize := FDataset.Count; + FDatasetValidationSize := FDatasetValidation.Count; + // FDatasetValidationSize := LoadDatasetFromFile(csValidationFileName, FDatasetValidation, FDatasetValidationClasses); + end; + + procedure TTestFitLoading.DoRun; + begin + FDataset := TStringList.Create(); + FDatasetClasses := TIntegerList.Create(); + FDatasetValidation := TStringList.Create(); + FDatasetValidationClasses := TIntegerList.Create(); + + LoadDataset(); + FNN := TNNet.Create(); + NFit := TNeuralDataLoadingFit.Create(); + + FNN.AddLayer([ + TNNetInput.Create(csContextLen, 1, csVocabSize), + TNNetPointwiseConv.Create(32,1), + TNNetDropout.Create(0.5), + TNNetPadXY.Create(1,0), + TNNetConvolutionReLU.Create(64,3,0,1,1), + TNNetMaxPool.Create(3), + TNNetPadXY.Create(1,0), + TNNetConvolutionReLU.Create(128,3,0,1,1), + TNNetPointwiseConvReLU.Create(1024,1), + TNNetMaxPoolWithPosition.Create(27,27,0,1,0), + // TNNetFullConnectReLU.Create(1024), + TNNetFullConnectLinear.Create(2, 1), + TNNetSoftMax.Create() + ]); + DebugThreadCount(); + FNN.DebugStructure; + + WriteLn('Computing...'); + //NFit.MaxThreadNum := 1; + NFit.LogEveryBatches := 100; + NFit.InitialLearningRate := 0.001; + NFit.LearningRateDecay := 0; + NFit.L2Decay := 0; + NFit.EnableClassComparison(); + NFit.EnableDefaultLoss(); + NFit.AvgWeightEpochCount := 1; + NFit.OnAfterEpoch := @OnAfterEpoch; + NFit.FitLoading( + FNN, + {TrainingVolumesCount=}FDatasetSize, + {ValidationVolumesCount=}FDatasetValidationSize, + {TestVolumesCount=}FDatasetValidationSize, + {batchsize=}32, + {epochs=}50, + @GetTrainingPair, @GetValidationPair, @GetTestPair + ); + FNN.DebugWeights(); + OnAfterEpoch(Self); + + NFit.Free; + FNN.Free; + FDatasetValidationClasses.Free; + FDatasetValidation.Free; + FDatasetClasses.Free; + FDataset.Free; + Write('Press ENTER to exit.'); + ReadLn; + Terminate; + end; + + procedure 
TTestFitLoading.WriteClassFromChars(str: String); + var + ClassId: integer; + begin + ClassId := GetClassFromChars(NFit.NN, str); + WriteLn('"',str, '" is ',csClassToStr[ClassId],'.'); + end; + + + procedure TTestFitLoading.OnAfterEpoch(Sender: TObject); + begin + WriteLn('Testing.'); + WriteClassFromChars('the is fantastic!'); + WriteClassFromChars('I hated this movie.'); + WriteClassFromChars('Horrible. I do not recommend.'); + end; + + procedure TTestFitLoading.GetTrainingPair(Idx: integer; ThreadId: integer; + pInput, pOutput: TNNetVolume); + var + SampleId: integer; + SampleLen: integer; + SampleStr: string; + SampleClass: integer; + begin + // Make sure that expected input and output have the proper sizes. + if FNN.GetFirstLayer().Output.Size <> pInput.Size then pInput.ReSize(FNN.GetFirstLayer().Output); + if FNN.GetLastLayer().Output.Size <> pOutput.Size then pOutput.ReSize(FNN.GetLastLayer().Output); + // Get the input sample + SampleId := Random(FDatasetSize); + SampleStr := RemoveRandomChars(FDataset[SampleId], Length(FDataset[SampleId]) div 10); + //SampleStr := RemoveRandomWord(FDataset[SampleId]); + SampleClass := FDatasetClasses[SampleId]; + SampleLen := Length(SampleStr); + if SampleLen > pInput.SizeX then SampleStr := copy(SampleStr, 1, pInput.SizeX); + // Encode the input and output volumes + pInput.OneHotEncodingReversed(SampleStr); + pOutput.SetClassForSoftMax(SampleClass); + pOutput.Tag := SampleClass; + end; + + procedure TTestFitLoading.GetValidationPair(Idx: integer; ThreadId: integer; + pInput, pOutput: TNNetVolume); + var + SampleId: integer; + SampleLen: integer; + SampleStr: string; + SampleClass: integer; + begin + // Make sure that expected input and output have the proper sizes. 
+ if FNN.GetFirstLayer().Output.Size <> pInput.Size then pInput.ReSize(FNN.GetFirstLayer().Output); + if FNN.GetLastLayer().Output.Size <> pOutput.Size then pOutput.ReSize(FNN.GetLastLayer().Output); + // Get the input sample + SampleId := Random(FDatasetValidationSize); + SampleStr := FDatasetValidation[SampleId]; + SampleClass := FDatasetValidationClasses[SampleId]; + SampleLen := Length(SampleStr); + if SampleLen > pInput.SizeX then SampleStr := copy(SampleStr, 1, pInput.SizeX); + // Encode the input and output volumes + pInput.OneHotEncodingReversed(SampleStr); + pOutput.SetClassForSoftMax(SampleClass); + pOutput.Tag := SampleClass; + end; + + procedure TTestFitLoading.GetTestPair(Idx: integer; ThreadId: integer; + pInput, pOutput: TNNetVolume); + begin + GetValidationPair(Idx, ThreadId, pInput, pOutput); + end; + + procedure TTestFitLoading.TestFromFile; + var + S: string; + NN: TNNet; + begin + NN := TNNet.Create(); + WriteLn('Loading neural network.'); + NN.LoadFromFile(csAutosavedFileName); + NN.DebugStructure(); + WriteLn(); + WriteLn('Write something and I will tell you if it is positive or negative.'); + repeat + Write('User: '); + ReadLn(S); + WriteClassFromChars(S); + until S = 'exit'; + NN.Free; + end; + +var + Application: TTestFitLoading; +begin + Application := TTestFitLoading.Create(nil); + Application.Title:='Nano Covolutional Based NLP Trained from File'; + //Application.TestFromFile; + Application.Run; + Application.Free; +end. 
+ diff --git a/neural/Neural.AVX.pas b/neural/Neural.AVX.pas deleted file mode 100644 index c121e30f..00000000 --- a/neural/Neural.AVX.pas +++ /dev/null @@ -1,112 +0,0 @@ -unit Neural.AVX; - -// ########################################### -// #### 32 bit intel avx functions -// ########################################### - -interface - -{$IFDEF CPUX64} -{$DEFINE x64} -{$ENDIF} -{$IFDEF cpux86_64} -{$DEFINE x64} -{$ENDIF} -{$IFNDEF x64} - -function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} - -{$ENDIF} - -implementation - -{$IFNDEF x64} - -{$IFDEF FPC} {$ASMMODE intel} {$S-} {$ENDIF} - -function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; -// eax = x, edx = y, ecx = N -asm - // iters - imul ecx, -4; - - // helper registers for the mt1, mt2 and dest pointers - sub eax, ecx; - sub edx, ecx; - - {$IFDEF FPC}vxorpd ymm0, ymm0, ymm0;{$ELSE}db $C5,$FD,$57,$C0;{$ENDIF} - - // unrolled loop - @Loop1: - add ecx, 128; - jg @loopEnd1; - - {$IFDEF FPC}vmovupd ymm1, [eax + ecx - 128];{$ELSE}db $C5,$FD,$10,$4C,$08,$80;{$ENDIF} - {$IFDEF FPC}vmovupd ymm2, [edx + ecx - 128];{$ELSE}db $C5,$FD,$10,$54,$0A,$80;{$ENDIF} - {$IFDEF FPC}vmulps ymm1, ymm1, ymm2;{$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm1;{$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} - - {$IFDEF FPC}vmovupd ymm3, [eax + ecx - 96];{$ELSE}db $C5,$FD,$10,$5C,$08,$A0;{$ENDIF} - {$IFDEF FPC}vmovupd ymm4, [edx + ecx - 96];{$ELSE}db $C5,$FD,$10,$64,$0A,$A0;{$ENDIF} - {$IFDEF FPC}vmulps ymm3, ymm3, ymm4;{$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm3;{$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} - - {$IFDEF FPC}vmovupd ymm1, [eax + ecx - 64];{$ELSE}db $C5,$FD,$10,$4C,$08,$C0;{$ENDIF} - {$IFDEF FPC}vmovupd ymm2, [edx + ecx - 64];{$ELSE}db $C5,$FD,$10,$54,$0A,$C0;{$ENDIF} - {$IFDEF FPC}vmulps ymm1, ymm1, ymm2;{$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm1;{$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} 
- - {$IFDEF FPC}vmovupd ymm3, [eax + ecx - 32];{$ELSE}db $C5,$FD,$10,$5C,$08,$E0;{$ENDIF} - {$IFDEF FPC}vmovupd ymm4, [edx + ecx - 32];{$ELSE}db $C5,$FD,$10,$64,$0A,$E0;{$ENDIF} - {$IFDEF FPC}vmulps ymm3, ymm3, ymm4;{$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm3;{$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} - - jmp @Loop1; - - @loopEnd1: - - {$IFDEF FPC}vextractf128 xmm2, ymm0, 1;{$ELSE}db $C4,$E3,$7D,$19,$C2,$01;{$ENDIF} - {$IFDEF FPC}vhaddps xmm0, xmm0, xmm2;{$ELSE}db $C5,$FB,$7C,$C2;{$ENDIF} - - sub ecx, 128; - jz @loop2End; - - // loop to get all fitting into an array of 4 - @Loop2: - add ecx, 16; - jg @Loop2End; - - {$IFDEF FPC}vmovupd xmm3, [eax + ecx - 16];{$ELSE}db $C5,$F9,$10,$5C,$08,$F0;{$ENDIF} - {$IFDEF FPC}vmovupd xmm4, [edx + ecx - 16];{$ELSE}db $C5,$F9,$10,$64,$0A,$F0;{$ENDIF} - {$IFDEF FPC}vmulps xmm3, xmm3, xmm4;{$ELSE}db $C5,$E0,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddps xmm0, xmm0, xmm3;{$ELSE}db $C5,$F8,$58,$C3;{$ENDIF} - jmp @Loop2; - - @Loop2End: - - // handle last 2 elements - sub ecx, 16; - jz @loop3End; - - @loop3: - add ecx, 4; - jg @loop3End; - - {$IFDEF FPC}vmovss xmm3, [eax + ecx - 4];{$ELSE}db $C5,$FA,$10,$5C,$08,$FC;{$ENDIF} - {$IFDEF FPC}vmovss xmm4, [edx + ecx - 4];{$ELSE}db $C5,$FA,$10,$64,$0A,$FC;{$ENDIF} - {$IFDEF FPC}vmulss xmm3, xmm3, xmm4;{$ELSE}db $C5,$E2,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddss xmm0, xmm0, xmm3;{$ELSE}db $C5,$FA,$58,$C3;{$ENDIF} - - jmp @loop3; - @loop3End: - - // build result - {$IFDEF FPC}vhaddps xmm0, xmm0, xmm0;{$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} - {$IFDEF FPC}vhaddps xmm0, xmm0, xmm0;{$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} - {$IFDEF FPC}vzeroupper;{$ELSE}db $C5,$F8,$77;{$ENDIF} - movss Result, xmm0; -end; - -{$ENDIF} - -end. 
diff --git a/neural/Neural.AVXx64.pas b/neural/Neural.AVXx64.pas deleted file mode 100644 index f4c1c870..00000000 --- a/neural/Neural.AVXx64.pas +++ /dev/null @@ -1,120 +0,0 @@ -unit Neural.AVXx64; - -// ########################################### -// #### 64 bit intel avx functions -// ########################################### - -interface - -{$IFDEF CPUX64} -{$DEFINE x64} -{$ENDIF} -{$IFDEF cpux86_64} -{$DEFINE x64} -{$ENDIF} -{$IFDEF x64} - -function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; {$IFDEF FPC}assembler;{$ENDIF} - -{$ENDIF} - -implementation - -{$IFDEF x64} - -{$IFDEF FPC} {$ASMMODE intel} {$S-} {$ENDIF} - -function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; -asm - {$IFDEF UNIX} - // Linux uses a diffrent ABI -> copy over the registers so they meet with winABI - // The parameters are passed in the following order: - // RDI, RSI, RDX, RCX, r8, r9 -> mov to RCX, RDX, R8, r9 - mov r8, rdx; - mov rdx, rsi; - mov rcx, rdi; - {$ENDIF} - - // iters - imul r8, -4; - - // helper registers for the mt1, mt2 and dest pointers - sub rcx, r8; - sub rdx, r8; - - {$IFDEF FPC}vxorpd ymm0, ymm0, ymm0;{$ELSE}db $C5,$FD,$57,$C0;{$ENDIF} - - // unrolled loop - @Loop1: - add r8, 128; - jg @loopEnd1; - - {$IFDEF FPC}vmovupd ymm1, [rcx + r8 - 128];{$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$80;{$ENDIF} - {$IFDEF FPC}vmovupd ymm2, [rdx + r8 - 128];{$ELSE}db $C4,$A1,$7D,$10,$54,$02,$80;{$ENDIF} - {$IFDEF FPC}vmulps ymm1, ymm1, ymm2;{$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm1;{$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} - - {$IFDEF FPC}vmovupd ymm3, [rcx + r8 - 96];{$ELSE}db $C4,$A1,$7D,$10,$5C,$01,$A0;{$ENDIF} - {$IFDEF FPC}vmovupd ymm4, [rdx + r8 - 96];{$ELSE}db $C4,$A1,$7D,$10,$64,$02,$A0;{$ENDIF} - {$IFDEF FPC}vmulps ymm3, ymm3, ymm4;{$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm3;{$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} - - {$IFDEF FPC}vmovupd ymm1, [rcx + r8 - 64];{$ELSE}db 
$C4,$A1,$7D,$10,$4C,$01,$C0;{$ENDIF} - {$IFDEF FPC}vmovupd ymm2, [rdx + r8 - 64];{$ELSE}db $C4,$A1,$7D,$10,$54,$02,$C0;{$ENDIF} - {$IFDEF FPC}vmulps ymm1, ymm1, ymm2;{$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm1;{$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} - - {$IFDEF FPC}vmovupd ymm3, [rcx + r8 - 32];{$ELSE}db $C4,$A1,$7D,$10,$5C,$01,$E0;{$ENDIF} - {$IFDEF FPC}vmovupd ymm4, [rdx + r8 - 32];{$ELSE}db $C4,$A1,$7D,$10,$64,$02,$E0;{$ENDIF} - {$IFDEF FPC}vmulps ymm3, ymm3, ymm4;{$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm3;{$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} - - jmp @Loop1; - - @loopEnd1: - - {$IFDEF FPC}vextractf128 xmm2, ymm0, 1;{$ELSE}db $C4,$E3,$7D,$19,$C2,$01;{$ENDIF} - {$IFDEF FPC}vhaddps xmm0, xmm0, xmm2;{$ELSE}db $C5,$FB,$7C,$C2;{$ENDIF} - - sub r8, 128; - jz @loop2End; - - // loop to get all fitting into an array of 4 - @Loop2: - add r8, 16; - jg @Loop2End; - - {$IFDEF FPC}vmovupd xmm3, [rcx + r8 - 16];{$ELSE}db $C4,$A1,$79,$10,$5C,$01,$F0;{$ENDIF} - {$IFDEF FPC}vmovupd xmm4, [rdx + r8 - 16];{$ELSE}db $C4,$A1,$79,$10,$64,$02,$F0;{$ENDIF} - {$IFDEF FPC}vmulps xmm3, xmm3, xmm4;{$ELSE}db $C5,$E0,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddps xmm0, xmm0, xmm3;{$ELSE}db $C5,$F8,$58,$C3;{$ENDIF} - jmp @Loop2; - - @Loop2End: - - // handle last 2 elements - sub r8, 16; - jz @loop3End; - - @loop3: - add r8, 4; - jg @loop3End; - - {$IFDEF FPC}vmovss xmm3, [rcx + r8 - 4];{$ELSE}db $C4,$A1,$7A,$10,$5C,$01,$FC;{$ENDIF} - {$IFDEF FPC}vmovss xmm4, [rdx + r8 - 4];{$ELSE}db $C4,$A1,$7A,$10,$64,$02,$FC;{$ENDIF} - {$IFDEF FPC}vmulss xmm3, xmm3, xmm4;{$ELSE}db $C5,$E2,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddss xmm0, xmm0, xmm3;{$ELSE}db $C5,$FA,$58,$C3;{$ENDIF} - - jmp @loop3; - @loop3End: - - // build result - {$IFDEF FPC}vhaddps xmm0, xmm0, xmm0;{$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} - {$IFDEF FPC}vhaddps xmm0, xmm0, xmm0;{$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} - {$IFDEF FPC}vzeroupper;{$ELSE}db $C5,$F8,$77;{$ENDIF} - movss Result, xmm0; -end; - -{$ENDIF} - 
-end. diff --git a/neural/NeuralAVX.pas b/neural/NeuralAVX.pas new file mode 100644 index 00000000..827a96ee --- /dev/null +++ b/neural/NeuralAVX.pas @@ -0,0 +1,222 @@ +unit NeuralAVX; + +// ########################################### +// #### 32 bit intel avx functions +// ########################################### + +interface + +{$IFDEF CPUX64} +{$DEFINE x64} +{$ENDIF} +{$IFDEF cpux86_64} +{$DEFINE x64} +{$ENDIF} +{$IFNDEF x64} + +{$DEFINE AVXSUP} // assembler support for AVX/FMA built in +{$IFNDEF FPC} +{$IF CompilerVersion<35} +{$UNDEF AVXSUP} +{$IFEND} +{$ENDIF} + + +// performs Result = sum(x[i]*y[i]); +function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} + +// performs x[i] = x[i] + fact*y[i]; +procedure AVXMulAdd( x : PSingle; y : PSingle; N : integer; const fact : single); {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} + +{$ENDIF} + +implementation + +{$IFNDEF x64} + +{$IFDEF FPC} {$ASMMODE intel} {$S-} {$ENDIF} + +function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; +// eax = x, edx = y, ecx = N +asm + // iters + imul ecx, -4; + + // helper registers for the x, y pointers + sub eax, ecx; + sub edx, ecx; + + {$IFDEF AVXSUP}vxorpd ymm0, ymm0, ymm0; {$ELSE}db $C5,$FD,$57,$C0;{$ENDIF} + + // unrolled loop + @Loop1: + add ecx, 128; + jg @loopEnd1; + + {$IFDEF AVXSUP}vmovupd ymm1, [eax + ecx - 128]; {$ELSE}db $C5,$FD,$10,$4C,$08,$80;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [edx + ecx - 128]; {$ELSE}db $C5,$FD,$10,$54,$0A,$80;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm0, ymm0, ymm1; {$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm3, [eax + ecx - 96]; {$ELSE}db $C5,$FD,$10,$5C,$08,$A0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm4, [edx + ecx - 96]; {$ELSE}db $C5,$FD,$10,$64,$0A,$A0;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm3, ymm3, ymm4; {$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps 
ymm0, ymm0, ymm3; {$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [eax + ecx - 64]; {$ELSE}db $C5,$FD,$10,$4C,$08,$C0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [edx + ecx - 64]; {$ELSE}db $C5,$FD,$10,$54,$0A,$C0;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm0, ymm0, ymm1; {$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm3, [eax + ecx - 32]; {$ELSE}db $C5,$FD,$10,$5C,$08,$E0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm4, [edx + ecx - 32]; {$ELSE}db $C5,$FD,$10,$64,$0A,$E0;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm3, ymm3, ymm4; {$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm0, ymm0, ymm3; {$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} + + jmp @Loop1; + + @loopEnd1: + + {$IFDEF AVXSUP}vextractf128 xmm2, ymm0, 1; {$ELSE}db $C4,$E3,$7D,$19,$C2,$01;{$ENDIF} + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm2; {$ELSE}db $C5,$FB,$7C,$C2;{$ENDIF} + + sub ecx, 128; + jz @loop3End; + + // loop to get all fitting into an array of 4 + @Loop2: + add ecx, 16; + jg @Loop2End; + + {$IFDEF AVXSUP}vmovupd xmm3, [eax + ecx - 16]; {$ELSE}db $C5,$F9,$10,$5C,$08,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd xmm4, [edx + ecx - 16]; {$ELSE}db $C5,$F9,$10,$64,$0A,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmulps xmm3, xmm3, xmm4; {$ELSE}db $C5,$E0,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm0, xmm0, xmm3; {$ELSE}db $C5,$F8,$58,$C3;{$ENDIF} + jmp @Loop2; + + @Loop2End: + + // handle last 2 elements + sub ecx, 16; + jz @loop3End; + + @loop3: + add ecx, 4; + jg @loop3End; + + {$IFDEF AVXSUP}vmovss xmm3, [eax + ecx - 4]; {$ELSE}db $C5,$FA,$10,$5C,$08,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovss xmm4, [edx + ecx - 4]; {$ELSE}db $C5,$FA,$10,$64,$0A,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmulss xmm3, xmm3, xmm4; {$ELSE}db $C5,$E2,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddss xmm0, xmm0, xmm3; {$ELSE}db $C5,$FA,$58,$C3;{$ENDIF} + + jmp @loop3; + @loop3End: + + // build result + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm0; {$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + 
{$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm0; {$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF AVXSUP}vzeroupper; {$ELSE}db $C5,$F8,$77;{$ENDIF} + movss Result, xmm0; +end; + +procedure AVXMulAdd( x : PSingle; y : PSingle; N : integer; const fact : single); {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} +// eax = x, edx = y, ecx = N +asm + push esi; + + // broadcast factor to ymm0 + lea esi, fact; + {$IFDEF AVXSUP}vbroadcastss ymm0, fact; {$ELSE}db $C4,$E2,$7D,$18,$45,$08;{$ENDIF} + + // iters + imul ecx, -4; + + // helper registers for the x, y + sub eax, ecx; + sub edx, ecx; + + // unrolled loop + @Loop1: + add ecx, 128; + jg @loopEnd1; + + {$IFDEF AVXSUP}vmovupd ymm1, [eax + ecx - 128]; {$ELSE}db $C5,$FD,$10,$4C,$08,$80;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [edx + ecx - 128]; {$ELSE}db $C5,$FD,$10,$54,$0A,$80;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 128], ymm1; {$ELSE}db $C5,$FD,$11,$4C,$08,$80;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [eax + ecx - 96]; {$ELSE}db $C5,$FD,$10,$4C,$08,$A0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [edx + ecx - 96]; {$ELSE}db $C5,$FD,$10,$54,$0A,$A0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 96], ymm1; {$ELSE}db $C5,$FD,$11,$4C,$08,$A0;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [eax + ecx - 64]; {$ELSE}db $C5,$FD,$10,$4C,$08,$C0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [edx + ecx - 64]; {$ELSE}db $C5,$FD,$10,$54,$0A,$C0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 64], ymm1; {$ELSE}db $C5,$FD,$11,$4C,$08,$C0;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [eax + ecx - 
32]; {$ELSE}db $C5,$FD,$10,$4C,$08,$E0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [edx + ecx - 32]; {$ELSE}db $C5,$FD,$10,$54,$0A,$E0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 32], ymm1; {$ELSE}db $C5,$FD,$11,$4C,$08,$E0;{$ENDIF} + jmp @Loop1; + + @loopEnd1: + + sub ecx, 128; + jz @loop3End; + + // loop to get all fitting into an array of 4 + @Loop2: + add ecx, 16; + jg @Loop2End; + + {$IFDEF AVXSUP}vmovupd xmm1, [eax + ecx - 16]; {$ELSE}db $C5,$F9,$10,$4C,$08,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd xmm2, [edx + ecx - 16]; {$ELSE}db $C5,$F9,$10,$54,$0A,$F0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps xmm2, xmm2, xmm0; {$ELSE}db $C5,$E8,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm1, xmm1, xmm2; {$ELSE}db $C5,$F0,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 16], xmm1; {$ELSE}db $C5,$F9,$11,$4C,$08,$F0;{$ENDIF} + jmp @Loop2; + + @Loop2End: + + // handle last 2 elements + sub ecx, 16; + jz @loop3End; + + @loop3: + add ecx, 4; + jg @loop3End; + + {$IFDEF AVXSUP}vmovss xmm1, [eax + ecx - 4]; {$ELSE}db $C5,$FA,$10,$4C,$08,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovss xmm2, [edx + ecx - 4]; {$ELSE}db $C5,$FA,$10,$54,$0A,$FC;{$ENDIF} + + {$IFDEF AVXSUP}vmulss xmm2, xmm2, xmm0; {$ELSE}db $C5,$EA,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddss xmm1, xmm1, xmm2; {$ELSE}db $C5,$F2,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovss [eax + ecx - 4], xmm1; {$ELSE}db $C5,$FA,$11,$4C,$08,$FC;{$ENDIF} + + jmp @loop3; + @loop3End: + + {$IFDEF AVXSUP}vzeroupper; {$ELSE}db $C5,$F8,$77;{$ENDIF} + pop esi; +end; + +{$ENDIF} + +end. 
diff --git a/neural/NeuralAVXx64.pas b/neural/NeuralAVXx64.pas new file mode 100644 index 00000000..cfa48736 --- /dev/null +++ b/neural/NeuralAVXx64.pas @@ -0,0 +1,235 @@ +unit NeuralAVXx64; + +// ########################################### +// #### 64 bit intel avx functions +// ########################################### + +interface + +{$IFDEF CPUX64} +{$DEFINE x64} +{$ENDIF} +{$IFDEF cpux86_64} +{$DEFINE x64} +{$ENDIF} +{$IFDEF x64} + +{$DEFINE AVXSUP} // assembler support for AVX/FMA built in +{$IFNDEF FPC} +{$IF CompilerVersion<35} +{$UNDEF AVXSUP} +{$IFEND} +{$ENDIF} + +// performs Result = sum(x[i]*y[i]); +function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; {$IFDEF FPC}assembler;{$ENDIF} + +// performs x[i] = x[i] + fact*y[i]; +procedure AVXMulAdd( x : PSingle; y : PSingle; N : integer; const fact : single); {$IFDEF FPC} assembler; {$ENDIF} + + +{$ENDIF} + +implementation + +{$IFDEF x64} + +{$IFDEF FPC} {$ASMMODE intel} {$S-} {$ENDIF} + +function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; +asm + {$IFDEF UNIX} + // Linux uses a diffrent ABI -> copy over the registers so they meet with winABI + // The parameters are passed in the following order: + // RDI, RSI, RDX, RCX, r8, r9 -> mov to RCX, RDX, R8, r9 + mov r8, rdx; + mov rdx, rsi; + mov rcx, rdi; + {$ENDIF} + + // iters + imul r8, -4; + + // helper registers for the mt1, mt2 and dest pointers + sub rcx, r8; + sub rdx, r8; + + {$IFDEF AVXSUP}vxorpd ymm0, ymm0, ymm0; {$ELSE}db $C5,$FD,$57,$C0;{$ENDIF} + + // unrolled loop + @Loop1: + add r8, 128; + jg @loopEnd1; + + {$IFDEF AVXSUP}vmovupd ymm1, [rcx + r8 - 128]; {$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$80;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [rdx + r8 - 128]; {$ELSE}db $C4,$A1,$7D,$10,$54,$02,$80;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm0, ymm0, ymm1; {$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm3, [rcx + r8 - 96]; {$ELSE}db 
$C4,$A1,$7D,$10,$5C,$01,$A0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm4, [rdx + r8 - 96]; {$ELSE}db $C4,$A1,$7D,$10,$64,$02,$A0;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm3, ymm3, ymm4; {$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm0, ymm0, ymm3; {$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [rcx + r8 - 64]; {$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$C0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [rdx + r8 - 64]; {$ELSE}db $C4,$A1,$7D,$10,$54,$02,$C0;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm0, ymm0, ymm1; {$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm3, [rcx + r8 - 32]; {$ELSE}db $C4,$A1,$7D,$10,$5C,$01,$E0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm4, [rdx + r8 - 32]; {$ELSE}db $C4,$A1,$7D,$10,$64,$02,$E0;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm3, ymm3, ymm4; {$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm0, ymm0, ymm3; {$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} + + jmp @Loop1; + + @loopEnd1: + + {$IFDEF AVXSUP}vextractf128 xmm2, ymm0, 1; {$ELSE}db $C4,$E3,$7D,$19,$C2,$01;{$ENDIF} + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm2; {$ELSE}db $C5,$FB,$7C,$C2;{$ENDIF} + + sub r8, 128; + jz @loop3End; + + // loop to get all fitting into an array of 4 + @Loop2: + add r8, 16; + jg @Loop2End; + + {$IFDEF AVXSUP}vmovupd xmm3, [rcx + r8 - 16]; {$ELSE}db $C4,$A1,$79,$10,$5C,$01,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd xmm4, [rdx + r8 - 16]; {$ELSE}db $C4,$A1,$79,$10,$64,$02,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmulps xmm3, xmm3, xmm4; {$ELSE}db $C5,$E0,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm0, xmm0, xmm3; {$ELSE}db $C5,$F8,$58,$C3;{$ENDIF} + jmp @Loop2; + + @Loop2End: + + // handle last 2 elements + sub r8, 16; + jz @loop3End; + + @loop3: + add r8, 4; + jg @loop3End; + + {$IFDEF AVXSUP}vmovss xmm3, [rcx + r8 - 4]; {$ELSE}db $C4,$A1,$7A,$10,$5C,$01,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovss xmm4, [rdx + r8 - 4]; {$ELSE}db $C4,$A1,$7A,$10,$64,$02,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmulss xmm3, xmm3, 
xmm4; {$ELSE}db $C5,$E2,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddss xmm0, xmm0, xmm3; {$ELSE}db $C5,$FA,$58,$C3;{$ENDIF} + + jmp @loop3; + @loop3End: + + // build result + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm0; {$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm0; {$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF AVXSUP}vzeroupper; {$ELSE}db $C5,$F8,$77;{$ENDIF} + movss Result, xmm0; +end; + +procedure AVXMulAdd( x : PSingle; y : PSingle; N : integer; const fact : single); {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} +// eax = x, edx = y, ecx = N +asm + {$IFDEF UNIX} + // Linux uses a diffrent ABI -> copy over the registers so they meet with winABI + // The parameters are passed in the following order: + // RDI, RSI, RDX, RCX, r8, r9 -> mov to RCX, RDX, R8, r9 + mov r8, rdx; + mov rdx, rsi; + mov rcx, rdi; + {$ENDIF} + + // iters + imul r8, -4; + + // helper registers for the mt1, mt2 and dest pointers + sub rcx, r8; + sub rdx, r8; + + // broadcast factor to ymm0 + {$IFDEF AVXSUP}vbroadcastss ymm0, xmm3; {$ELSE}db $C4,$E2,$7D,$18,$C3;{$ENDIF} + + // unrolled loop + @Loop1: + add r8, 128; + jg @loopEnd1; + + {$IFDEF AVXSUP}vmovupd ymm1, [rcx + r8 - 128]; {$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$80;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [rdx + r8 - 128]; {$ELSE}db $C4,$A1,$7D,$10,$54,$02,$80;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 128], ymm1; {$ELSE}db $C4,$A1,$7D,$11,$4C,$01,$80;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [rcx + r8 - 96]; {$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$A0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [rdx + r8 - 96]; {$ELSE}db $C4,$A1,$7D,$10,$54,$02,$A0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 96], ymm1; 
{$ELSE}db $C4,$A1,$7D,$11,$4C,$01,$A0;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [rcx + r8 - 64]; {$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$C0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [rdx + r8 - 64]; {$ELSE}db $C4,$A1,$7D,$10,$54,$02,$C0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 64], ymm1; {$ELSE}db $C4,$A1,$7D,$11,$4C,$01,$C0;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [rcx + r8 - 32]; {$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$E0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [rdx + r8 - 32]; {$ELSE}db $C4,$A1,$7D,$10,$54,$02,$E0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 32], ymm1; {$ELSE}db $C4,$A1,$7D,$11,$4C,$01,$E0;{$ENDIF} + jmp @Loop1; + + @loopEnd1: + + sub r8, 128; + jz @loop3End; + + // loop to get all fitting into an array of 4 + @Loop2: + add r8, 16; + jg @Loop2End; + + {$IFDEF AVXSUP}vmovupd xmm1, [rcx + r8 - 16]; {$ELSE}db $C4,$A1,$79,$10,$4C,$01,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd xmm2, [rdx + r8 - 16]; {$ELSE}db $C4,$A1,$79,$10,$54,$02,$F0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps xmm2, xmm2, xmm0; {$ELSE}db $C5,$E8,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm1, xmm1, xmm2; {$ELSE}db $C5,$F0,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 16], xmm1; {$ELSE}db $C4,$A1,$79,$11,$4C,$01,$F0;{$ENDIF} + jmp @Loop2; + + @Loop2End: + + // handle last 2 elements + sub r8, 16; + jz @loop3End; + + @loop3: + add r8, 4; + jg @loop3End; + + {$IFDEF AVXSUP}vmovss xmm1, [rcx + r8 - 4]; {$ELSE}db $C4,$A1,$7A,$10,$4C,$01,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovss xmm2, [rdx + r8 - 4]; {$ELSE}db $C4,$A1,$7A,$10,$54,$02,$FC;{$ENDIF} + + {$IFDEF AVXSUP}vmulss xmm2, xmm2, xmm0; {$ELSE}db $C5,$EA,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddss xmm1, xmm1, xmm2; {$ELSE}db 
$C5,$F2,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovss [rcx + r8 - 4], xmm1; {$ELSE}db $C4,$A1,$7A,$11,$4C,$01,$FC;{$ENDIF} + + jmp @loop3; + @loop3End: + + {$IFDEF AVXSUP}vzeroupper; {$ELSE}db $C5,$F8,$77;{$ENDIF} +end; + +{$ENDIF} + +end. diff --git a/neural/neuraldatasets.pas b/neural/neuraldatasets.pas index 45b04ccf..ce73b870 100644 --- a/neural/neuraldatasets.pas +++ b/neural/neuraldatasets.pas @@ -187,7 +187,7 @@ TClassesAndElements = class(TStringStringListVolume) {$ENDIF} // Loads an image from a file and stores it into a Volume. - function LoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume):boolean; overload; + procedure LoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume); overload; // Loads an image from a file and stores it into a Volume resizing to // SizeX, SizeY and optionally encoding as neuronal input if has a @@ -277,6 +277,38 @@ procedure TestBatch // This function translates the original CIFAR10 labels to Animal/Machine labels. procedure TranslateCifar10VolumesToMachineAnimal(VolumeList: TNNetVolumeList); +{ + RandomSubstring: + This NLP function takes a string as input and returns a substring that starts + immediately after a randomly selected space character within the input string. + If there are no spaces in the input string, the entire string is returned as is. + The function is useful for obtaining a random piece of text from a given string, + which can be applied in various scenarios that require text randomization. + + Space positions are tracked using a TIntegerList. The Copy function is used + to extract the substring from the randomly selected space position to the end + of the input string. +} +function RandomSubstring(const InputString: string): string; + +{ + RemoveRandomChars: + This function takes a string and an integer count as input. It removes Count + number of characters at random positions from the given string Str. 
The length + of the string is recalculated in each iteration to account for the reduction in + the string's length after each character removal. +} +function RemoveRandomChars(const Str: string; Count: integer): string; + + +// This function randomly removes one word from the input string. +function RemoveRandomWord(const Str: string): string; + +type TNNetAAInteger = array of array of integer; + +procedure LoadIntegersInCSV(filename: string; + var aTokens: TNNetAAInteger; MaxRows: integer = 0); + {$IFNDEF FPC} function SwapEndian(I:integer):integer; procedure FindAllDirectories(AList: TStrings; const SearchPath: String; @@ -568,7 +600,7 @@ procedure TClassesAndElements.LoadImages(color_encoding: integer; NewSizeX: inte {$IFDEF Debug} Self.LoadImages_NTL(0,1); {$ELSE} - NTL.StartProc(@Self.LoadImages_NTL); + NTL.StartProc({$IFDEF FPC}@{$ENDIF}Self.LoadImages_NTL); {$ENDIF} end; NTL.Free; @@ -715,16 +747,16 @@ function TClassesAndElements.FileCountAtClassId(ClassId: integer): integer; end; {$IFDEF FPC} -function TFileNameList.ThreadSafeLoadImageFromFileIntoVolume( - ImageFileName: string; V: TNNetVolume): boolean; +procedure TFileNameList.ThreadSafeLoadImageFromFileIntoVolume( + ImageFileName: string; V: TNNetVolume); var M: TFPMemoryImage; begin M := TFPMemoryImage.Create(1, 1); {$IFDEF HASTHREADS}EnterCriticalSection(FCritSecLoad);{$ENDIF} - Result := M.LoadFromFile( ImageFileName ); + M.LoadFromFile( ImageFileName ); {$IFDEF HASTHREADS}LeaveCriticalSection(FCritSecLoad);{$ENDIF} - if Result then LoadImageIntoVolume(M, V); + LoadImageIntoVolume(M, V); M.Free; end; @@ -819,6 +851,7 @@ procedure LoadPictureIntoVolume(Picture: TPicture; Vol:TNNetVolume); end; end; + procedure TFileNameList.ThreadSafeLoadImageFromFileIntoVolume( ImageFileName: string; V: TNNetVolume); var @@ -832,7 +865,7 @@ procedure TFileNameList.ThreadSafeLoadImageFromFileIntoVolume( LocalPicture.Free; end; -function LoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume):boolean; 
+procedure LoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume); var LocalPicture: TPicture; begin @@ -840,7 +873,6 @@ function LoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume):boolea LocalPicture.LoadFromFile( ImageFileName ); LoadPictureIntoVolume(LocalPicture, V); LocalPicture.Free; - Result := true; end; (* @@ -1339,25 +1371,20 @@ function LoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume; var VAux: TNNetVolume; begin - if LoadImageFromFileIntoVolume(ImageFileName, V) then + LoadImageFromFileIntoVolume(ImageFileName, V); + if (V.SizeX<>SizeX) or (V.SizeY<>SizeY) then begin - if (V.SizeX<>SizeX) or (V.SizeY<>SizeY) then - begin - VAux := TNNetVolume.Create; - VAux.Copy(V); - V.CopyResizing(VAux, SizeX, SizeY); - VAux.Free; - end; - if (EncodeNeuronalInput >= 0) then - begin - V.RgbImgToNeuronalInput( (EncodeNeuronalInput) and 255 ); - end; - Result := true; - end - else + VAux := TNNetVolume.Create; + VAux.Copy(V); + V.CopyResizing(VAux, SizeX, SizeY); + VAux.Free; + end; + if (EncodeNeuronalInput >= 0) then begin - Result := false; + V.RgbImgToNeuronalInput( (EncodeNeuronalInput) and 255 ); end; + + Result := True; end; procedure ConfusionWriteCSVHeader(var CSVConfusion: TextFile; Labels: array of string); @@ -1704,4 +1731,125 @@ procedure TestBatch pOutput.Free; end; +function RemoveRandomWord(const Str: string): string; +var + WordList: TNNetStringList; + RandomWordIndex: integer; +begin + Result := Str; + // Split the string into words based on spaces. + WordList := CreateTokenizedStringList(Result,' '); + // Check if there are any words to remove. + if WordList.Count > 1 then + begin + // Select a random word to remove. + RandomWordIndex := Random(WordList.Count); + WordList.Delete(RandomWordIndex); + // Reconstruct the string from the remaining words. + Result := WordList.DelimitedText; + end; + // Free the TStringList to prevent memory leaks. 
+ WordList.Free; +end; + +procedure LoadIntegersInCSV(filename: string; var aTokens: TNNetAAInteger; + MaxRows: integer = 0); +var + LargeFile: TextFile; + StrLine: string; + RowCnt, WordCnt: integer; + Separator: TNNetStringList; +begin + Separator := CreateTokenizedStringList(','); + RowCnt := 0; + //WriteLn('Counting rows from: ', filename); + AssignFile(LargeFile, filename); + Reset(LargeFile); + while (not Eof(LargeFile)) and ( (MaxRows=0) or (RowCnt<MaxRows) ) do + begin + ReadLn(LargeFile, StrLine); + RowCnt := RowCnt + 1; + end; + CloseFile(LargeFile); + //WriteLn('Loading: ', filename); + SetLength(aTokens, RowCnt); + //WriteLn('Loading ', RowCnt,' rows.'); + Reset(LargeFile); + RowCnt := 0; + while (not Eof(LargeFile)) and ( (MaxRows=0) or (RowCnt<MaxRows) ) do + begin + ReadLn(LargeFile, StrLine); + Separator.DelimitedText := StrLine; + SetLength(aTokens[RowCnt], Separator.Count); + if Separator.Count > 0 then + begin + for WordCnt := 0 to Separator.Count - 1 do + begin + aTokens[RowCnt][WordCnt] := StrToInt(Separator[WordCnt]); + end; + end; + RowCnt := RowCnt + 1; + end; + CloseFile(LargeFile); +end; + +function RemoveRandomChars(const Str: string; Count: integer): string; +var + i: integer; + StrLen: integer; +begin + Result := Str; + // Calculate the length of the string before removing characters. + StrLen := Length(Result); + if (Count > 0) and (StrLen>1) then + begin + // Loop for the number of characters to be removed. + for i := 1 to Count do + begin + // Check if the string is not empty. + if StrLen > 1 then + begin + // Randomly select a character position and remove one character from that position. + // The '+ 1' is necessary because Pascal strings are 1-indexed, not 0-indexed. 
+ Delete(Result, Random(StrLen) + 1, 1); + Dec(StrLen); + end; + end; + end; +end; + + +function RandomSubstring(const InputString: string): string; +var + SpacePositions: TIntegerList; + I, RandomSpacePos: Integer; + InputStringLen: integer; +begin + InputStringLen := Length(InputString); + if InputStringLen > 0 then + begin + // Create a new integer list instance + SpacePositions := TIntegerList.Create; + // Find the positions of all spaces in the string + for I := 1 to InputStringLen do + begin + if InputString[I] = ' ' then + begin + SpacePositions.Add(I); + end; + end; + + // Append -1 to handle the case with no spaces + SpacePositions.Add(0); + + // Randomly select one of the space positions + RandomSpacePos := SpacePositions[Random(SpacePositions.Count)]; + + // Return the substring starting from the position after the random space + Result := Copy(InputString, RandomSpacePos + 1, InputStringLen - RandomSpacePos); + SpacePositions.Free; + end + else Result := ''; +end; + end. diff --git a/neural/neuralfit.pas b/neural/neuralfit.pas index 7fb44a97..ef08acca 100644 --- a/neural/neuralfit.pas +++ b/neural/neuralfit.pas @@ -54,37 +54,39 @@ TNeuralFitBase = class(TMObject) FCurrentEpoch: integer; FCurrentStep: integer; FCurrentTrainingError: TNeuralFloat; + FCustomLearningRateScheduleFn: TCustomLearningRateScheduleFn; + FCustomLearningRateScheduleObjFn: TCustomLearningRateScheduleObjFn; + FDataAugmentation: boolean; + FFinishedThread: TNNetVolume; + {$IFDEF HASTHREADS}FCritSec: TRTLCriticalSection;{$ENDIF} FNN: TNNet; FGlobalHit: integer; FGlobalMiss: integer; FGlobalTotal: integer; FGlobalTotalLoss: single; FGlobalErrorSum: single; - FFinishedThread: TNNetVolume; - {$IFDEF HASTHREADS}FCritSec: TRTLCriticalSection;{$ENDIF} - FMultipleSamplesAtValidation: boolean; - FDataAugmentation: boolean; - FVerbose: boolean; + FInertia: single; FStaircaseEpochs: integer; FStepSize: integer; + FMaxEpochs: integer; + FMultipleSamplesAtValidation: boolean; + FVerbose: boolean; 
FLearningRateDecay: single; FInitialLearningRate: single; FCyclicalLearningRateLen: integer; FInitialEpoch: integer; - FMaxEpochs: integer; FMinLearnRate: single; FCurrentLearningRate: single; - FInertia: single; FL2Decay: TNeuralFloat; + FLogEveryBatches: integer; FFileNameBase: string; FClipDelta: single; FTargetAccuracy: single; - FCustomLearningRateScheduleFn: TCustomLearningRateScheduleFn; - FCustomLearningRateScheduleObjFn: TCustomLearningRateScheduleObjFn; FOnAfterStep, FOnAfterEpoch, FOnStart: TNotifyEvent; FRunning, FShouldQuit: boolean; FTrainingAccuracy, FValidationAccuracy, FTestAccuracy: TNeuralFloat; FMinBackpropagationError: TNeuralFloat; + FMinBackpropagationErrorProportion: TNeuralFloat; FLoadBestAdEnd: boolean; FTestBestAtEnd: boolean; {$IFDEF OpenCL} @@ -123,11 +125,12 @@ TNeuralFitBase = class(TMObject) property InitialEpoch: integer read FInitialEpoch write FInitialEpoch; property InitialLearningRate: single read FInitialLearningRate write FInitialLearningRate; property LearningRateDecay: single read FLearningRateDecay write FLearningRateDecay; - property MinLearnRate : single read FMinLearnRate write FMinLearnRate; property LoadBestAtEnd: boolean read FLoadBestAdEnd write FLoadBestAdEnd; + property LogEveryBatches: integer read FLogEveryBatches write FLogEveryBatches; property L2Decay: single read FL2Decay write FL2Decay; property MaxThreadNum: integer read FMaxThreadNum write FMaxThreadNum; property MinBackpropagationError: TNeuralFloat read FMinBackpropagationError write FMinBackpropagationError; + property MinBackpropagationErrorProportion: TNeuralFloat read FMinBackpropagationErrorProportion write FMinBackpropagationErrorProportion; property Momentum: single read FInertia write FInertia; property MultipleSamplesAtValidation: boolean read FMultipleSamplesAtValidation write FMultipleSamplesAtValidation; property NN: TNNet read FNN; @@ -207,6 +210,7 @@ TNeuralDataLoadingFit = class(TNeuralFitWithImageBase) procedure 
EnableBipolar99HitComparison(); procedure EnableClassComparison(); procedure EnableDefaultImageTreatment(); override; + procedure EnableDefaultLoss; // On most cases, you should never call the following methods directly procedure RunNNThread(index, threadnum: integer); @@ -657,7 +661,7 @@ procedure TNeuralDataLoadingFit.FitLoading(pNN: TNNet; TrainingCnt, FTrainingAccuracy := AccuracyWithInertia/100; end; - if ( (FGlobalTotal > 0) and (I mod 10 = 0) ) then + if ( (FGlobalTotal > 0) and (I mod FLogEveryBatches = 0) ) then begin totalTimeSeconds := (Now() - startTime) * 24 * 60 * 60; if FVerbose then MessageProc @@ -803,9 +807,9 @@ procedure TNeuralDataLoadingFit.FitLoading(pNN: TNNet; TrainingCnt, AssignFile(CSVFile, FileNameCSV); Append(CSVFile); - MessageProc( - 'Epoch time: ' + FloatToStrF( totalTimeSeconds*(TrainingCnt/(FStepSize*10))/60,ffFixed,1,4)+' minutes.' + - ' '+IntToStr(Epochs)+' epochs: ' + FloatToStrF( Epochs*totalTimeSeconds*(TrainingCnt/(FStepSize*10))/3600,ffFixed,1,4)+' hours.'); + MessageProc( + 'Epoch time: ' + FloatToStrF( totalTimeSeconds*(TrainingCnt/(FStepSize*FLogEveryBatches))/60,ffFixed,1,4)+' minutes.' 
+ + ' '+IntToStr(Epochs)+' epochs: ' + FloatToStrF( Epochs*totalTimeSeconds*(TrainingCnt/(FStepSize*FLogEveryBatches))/3600,ffFixed,1,4)+' hours.'); MessageProc( 'Epochs: '+IntToStr(FCurrentEpoch)+ @@ -1062,8 +1066,14 @@ procedure TNeuralDataLoadingFit.RunNNThread(index, threadnum: integer); LocalErrorSum := LocalErrorSum + CurrentError; if (CurrentError > FMinBackpropagationError) or - (CurrentError > FCurrentTrainingError/4) - then LocalNN.Backpropagate( vOutput ); + ( + (FCurrentTrainingError>0) and + (CurrentError > FCurrentTrainingError*FMinBackpropagationErrorProportion) + ) + then + begin + LocalNN.Backpropagate( vOutput ); + end; CurrentLoss := 0; if Assigned(FLossFn) then @@ -1449,11 +1459,16 @@ procedure TNeuralDataLoadingFit.EnableClassComparison(); FInferHitFn := {$IFDEF FPC}@{$ENDIF}ClassCompare; end; +procedure TNeuralDataLoadingFit.EnableDefaultLoss(); +begin + FLossFn := {$IFDEF FPC}@{$ENDIF}DefaultLossFn; +end; + procedure TNeuralDataLoadingFit.EnableDefaultImageTreatment(); begin inherited EnableDefaultImageTreatment(); EnableClassComparison(); - FLossFn := {$IFDEF FPC}@{$ENDIF}DefaultLossFn; + EnableDefaultLoss(); end; { TNeuralFitBase } @@ -1489,6 +1504,7 @@ constructor TNeuralFitBase.Create(); FCyclicalLearningRateLen := 0; // not cyclical by default. 
FInitialEpoch := 0; FMinBackpropagationError := 0; + FMinBackpropagationErrorProportion := 0.25; fMinLearnRate := FInitialLearningRate * 0.01; FInertia := 0.9; FClipDelta := 0.0; @@ -1509,6 +1525,7 @@ constructor TNeuralFitBase.Create(); FCurrentStep := 0; FLoadBestAdEnd := True; FTestBestAtEnd := True; + FLogEveryBatches := 10; end; destructor TNeuralFitBase.Destroy(); @@ -1628,6 +1645,7 @@ constructor TNeuralImageFit.Create(); FIsSoftmax := true; FMaxCropSize := 8; FMinBackpropagationError := 0.2; + FMinBackpropagationErrorProportion := 0.25; FMultipleSamplesAtValidation := true; FTrainingSampleProcessedCnt := TNNetVolume.Create; end; @@ -1774,7 +1792,9 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; ' Batch size:' + IntToStr(FBatchSize) + ' Step size:' + IntToStr(FStepSize) + ' Staircase epochs:' + IntToStr(FStaircaseEpochs) + - ' Min backprop error:' + FloatToStrF(MinBackpropagationError,ffFixed,4,2) + ' Min backprop error and proportion:' + + FloatToStrF(FMinBackpropagationError,ffFixed,4,2)+' '+ + FloatToStrF(FMinBackpropagationErrorProportion,ffFixed,4,2) ); if Assigned(FImgVolumes) then MessageProc('Training images: '+IntToStr(FImgVolumes.Count)); if Assigned(FImgValidationVolumes) then MessageProc('Validation images: '+IntToStr(FImgValidationVolumes.Count)); @@ -1861,7 +1881,7 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; FTrainingAccuracy := AccuracyWithInertia/100; end; - if ( (FGlobalTotal > 0) and (I mod 10 = 0) ) then + if ( (FGlobalTotal > 0) and (I mod FLogEveryBatches = 0) ) then begin totalTimeSeconds := (Now() - startTime) * 24 * 60 * 60; if FVerbose then MessageProc @@ -2027,8 +2047,8 @@ procedure TNeuralImageFit.Fit(pNN: TNNet; Append(CSVFile); MessageProc( - 'Epoch time: ' + FloatToStrF( totalTimeSeconds*(pImgVolumes.Count/(FStepSize*10))/60,ffFixed,1,4)+' minutes.' 
+ - ' '+IntToStr(Epochs)+' epochs: ' + FloatToStrF( Epochs*totalTimeSeconds*(pImgVolumes.Count/(FStepSize*10))/3600,ffFixed,1,4)+' hours.'); + 'Epoch time: ' + FloatToStrF( totalTimeSeconds*(pImgVolumes.Count/(FStepSize*FLogEveryBatches))/60,ffFixed,1,4)+' minutes.' + + ' '+IntToStr(Epochs)+' epochs: ' + FloatToStrF( Epochs*totalTimeSeconds*(pImgVolumes.Count/(FStepSize*FLogEveryBatches))/3600,ffFixed,1,4)+' hours.'); MessageProc( 'Epochs: '+IntToStr(FCurrentEpoch)+ @@ -2199,8 +2219,12 @@ procedure TNeuralImageFit.RunNNThread(index, threadnum: integer); OutputValue := Max(OutputValue, 0.001); end; - if (CurrentError>FMinBackpropagationError) or - (CurrentError>FCurrentTrainingError/4) then + if + (CurrentError > FMinBackpropagationError) or + ( + (FCurrentTrainingError>0) and + (CurrentError > FCurrentTrainingError*FMinBackpropagationErrorProportion) + ) then begin LocalNN.Backpropagate(vOutput); end @@ -2520,6 +2544,7 @@ procedure TNeuralFitWithImageBase.EnableDefaultImageTreatment(); FHasMakeGray := True; FMaxCropSize := 8; FMinBackpropagationError := 0.2; + FMinBackpropagationErrorProportion := 0.25; FMultipleSamplesAtValidation := True; end; diff --git a/neural/neuralnetwork.pas b/neural/neuralnetwork.pas index 9b18de7c..1189c8e2 100644 --- a/neural/neuralnetwork.pas +++ b/neural/neuralnetwork.pas @@ -21,43 +21,7 @@ (* // coded, adapted and ported by Joao Paulo Schwarz Schuler -// https://sourceforge.net/p/cai/ ----------------------------------------------- -You can find simple to understand examples at: -https://sourceforge.net/p/cai/svncode/HEAD/tree/trunk/lazarus/experiments/supersimple/ -https://sourceforge.net/p/cai/svncode/HEAD/tree/trunk/lazarus/experiments/supersimplecorrelation/ ----------------------------------------------- -There are CIFAR-10 examples at: -https://sourceforge.net/p/cai/svncode/HEAD/tree/trunk/lazarus/testcnnalgo/testcnnalgo.lpr -https://sourceforge.net/p/cai/svncode/HEAD/tree/trunk/lazarus/experiments/visualCifar10BatchUpdate/ 
-https://sourceforge.net/p/cai/svncode/HEAD/tree/trunk/lazarus/experiments/visualCifar10OpenCL/ ----------------------------------------------- -Example - How to Create Your Network -NumClasses := 10; -NN := TNNet.Create(); -NN.AddLayer( TNNetInput.Create(32,32,3) ); -NN.AddLayer( TNNetConvolutionReLU.Create( 16,5,0,0) ); -NN.AddLayer( TNNetMaxPool.Create(2) ); -NN.AddLayer( TNNetConvolutionReLU.Create(128,5,0,0) ); -NN.AddLayer( TNNetMaxPool.Create(2) ); -NN.AddLayer( TNNetConvolutionReLU.Create(128,5,0,0) ); -NN.AddLayer( TNNetLayerFullConnectReLU.Create(64) ); -NN.AddLayer( TNNetLayerFullConnect.Create(NumClasses) ); -NN.SetLearningRate(0.01,0.8); ----------------------------------------------- -Example - How to create a simple fully forward connected network 3x3 -NN := TNNet.Create(); -NN.AddLayer( TNNetInput.Create(3) ); -NN.AddLayer( TNNetLayerFullConnectReLU.Create(3) ); -NN.AddLayer( TNNetLayerFullConnectReLU.Create(3) ); -NN.SetLearningRate(0.01,0.8); ----------------------------------------------- -Example - How to Train Your Network -// InputVolume and vDesiredVolume are of the type TNNetVolume -NN.Compute(InputVolume); -NN.GetOutput(PredictedVolume); -vDesiredVolume.SetClassForReLU(DesiredClass); -NN.Backpropagate(vDesiredVolume); +// https://github.com/joaopauloschuler/neural-api/ ---------------------------------------------- Interesting links: http://cs.stanford.edu/people/karpathy/convnetjs/demo/cifar10.html @@ -84,23 +48,32 @@ interface {$IFDEF FPC} fgl, {$ENDIF} + {$IFDEF THREADSAVERANDOM}RandomEng, {$ENDIF} Classes, SysUtils, math, syncobjs, neuralvolume, neuralgeneric, neuralbyteprediction, neuralcache, neuralab; const csMaxInterleavedSize: integer = 95; + csNNetMaxParameterIdx = 7; type + TNNet = class; + TNNetLayer = class; + { TNNetNeuron } TNNetNeuron = class (TMObject) protected FWeights: TNNetVolume; FBackInertia: TNNetVolume; + FBackInertia2: TNNetVolume; FDelta: TNNetVolume; + FDelta2: TNNetVolume; + FParentLayer: TNNetLayer; private 
FBiasWeight: TNeuralFloat; FBiasInertia: TNeuralFloat; + FBiasInertia2: TNeuralFloat; FBiasDelta: TNeuralFloat; public constructor Create(); @@ -109,6 +82,9 @@ TNNetNeuron = class (TMObject) procedure Fill(Value:TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} procedure AddInertia(); {$IFDEF Release} inline; {$ENDIF} procedure UpdateWeights(Inertia:TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} + procedure UpdateWeightsWithoutInertia(); {$IFDEF Release} inline; {$ENDIF} + procedure CalcAdamDelta(); + procedure UpdateWeightsAdam(); {$IFDEF Release} inline; {$ENDIF} function SaveToString(): string; procedure LoadFromString(strData: string); procedure ClearDelta; {$IFDEF Release} inline; {$ENDIF} @@ -136,6 +112,8 @@ TNNetNeuron = class (TMObject) procedure InitHeGaussianDepthwise(Value: TNeuralFloat = 1); // Weight Initializer for SELU activation function. procedure InitSELU(Value: TNeuralFloat = 1); + // Memory Initializer for Adam Optimizer + procedure InitAdam(ParentLayer: TNNetLayer); property Weights: TNNetVolume read FWeights; property Bias: TNeuralFloat read FBiasWeight; @@ -165,11 +143,6 @@ TNNetNeuronList = class (TNNetList) procedure InitForDebug(); end; - const - csNNetMaxParameterIdx = 7; - - type - TNNet = class; /// neural network layer TNNetLayer = class(TMObject) protected @@ -187,12 +160,17 @@ TNNetLayer = class(TMObject) FSuppressBias: integer; // Fast access to TNNetNeuron FArrNeurons: array of TNNetNeuron; - FInertia: TNeuralFloat; FPrevLayer: TNNetLayer; FLearningRate: TNeuralFloat; FL2Decay: TNeuralFloat; + // Adam settings + FBeta1, FBeta2, FEpsilon: TNeuralFloat; + FBeta1Decay, FBeta2Decay: TNeuralFloat; + FOneMinusBeta1Decay, FOneMinusBeta2Decay: TNeuralFloat; + FStruct: array [0..csNNetMaxParameterIdx] of integer; + FFloatSt: array [0..csNNetMaxParameterIdx] of TNeuralFloat; //backpropagation properties FDepartingBranchesCnt: integer; @@ -261,6 +239,7 @@ TNNetLayer = class(TMObject) function ForceMaxAbsoluteDelta(vMax: TNeuralFloat): 
TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} function ForceMaxAbsoluteWeight(vMax: TNeuralFloat): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} function GetMaxAbsoluteDelta(): TNeuralFloat; virtual; + procedure NormalizeMaxAbsoluteDeltaPerNeuron(MaxDelta: TNeuralFloat); procedure GetMinMaxAtDepth(pDepth: integer; var pMin, pMax: TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} // Returns the sum of all weights from all neurons in the layer. function GetWeightSum(): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} @@ -301,6 +280,8 @@ TNNetLayer = class(TMObject) function SaveStructureToString(): string; virtual; procedure SetBatchUpdate(pBatchUpdate: boolean); {$IFDEF Release} inline; {$ENDIF} procedure UpdateWeights(); {$IFDEF Release} inline; {$ENDIF} + procedure CalcAdamDelta(); {$IFDEF Release} inline; {$ENDIF} + procedure UpdateWeightsAdam(); {$IFDEF Release} inline; {$ENDIF} function InitBasicPatterns(): TNNetLayer; // Increments an internal counter that counts how many branches load @@ -338,6 +319,9 @@ TNNetLayer = class(TMObject) function InitGlorotBengioUniform(Value: TNeuralFloat = 1): TNNetLayer; // Weight Initializer for SELU activation function. 
function InitSELU(Value: TNeuralFloat = 1): TNNetLayer; + // Memory Initializer for Adam optimizer + function InitAdam(Beta1, Beta2, Epsilon: TNeuralFloat): TNNetLayer; + procedure InitDefault(); virtual; property ActivationFn: TNeuralActivationFunction read FActivationFn write FActivationFn; @@ -357,6 +341,11 @@ TNNetLayer = class(TMObject) property BackwardTime: double read FBackwardTime write FBackwardTime; property ForwardTime: double read FForwardTime write FForwardTime; property LinkedNeurons: boolean read FLinkedNeurons; + {$IFDEF OpenCL} + property HasOpenCL: boolean read FHasOpenCL; + property ShouldOpenCL:boolean read FShouldOpenCL; + {$ENDIF} + end; TNNetLayerClass = class of TNNetLayer; @@ -416,6 +405,24 @@ TNNetInput = class(TNNetInputBase) function DisableErrorCollection: TNNetInput; end; + // This layer transposes the X and Depth axis. + TNNetTransposeXD = class(TNNetLayer) + private + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + procedure Compute(); override; + procedure Backpropagate(); override; + end; + + // This layer transposes the Y and Depth axis. + TNNetTransposeYD = class(TNNetLayer) + private + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + procedure Compute(); override; + procedure Backpropagate(); override; + end; + /// This layer copies the input to the output and can be used as a base class // to your new layers. TNNetIdentity = class(TNNetLayer) @@ -442,11 +449,42 @@ TNNetPad = class(TNNetLayer) FPadding: integer; procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; public + constructor Create; overload; override; constructor Create(Padding: integer); reintroduce; overload; procedure Compute(); override; procedure Backpropagate(); override; end; + /// Padding layer: adds padding to the input. + // This layer is similar to TNNetPad except that it allows you to add distinct + // paddings to X and Y. + // This layer has no trainable parameter. 
Adding a padding layer may be + // more efficient than padding at the convolutional layer. + TNNetPadXY = class(TNNetLayer) + private + FPaddingX, FPaddingY: integer; + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + constructor Create; overload; override; + constructor Create(PaddingX, PaddingY: integer); reintroduce; overload; + procedure Compute(); override; + procedure Backpropagate(); override; + end; + + { TNNetCrop } + + TNNetCrop = class(TNNetLayer) + private + FStartX, FStartY: integer; + FLenX, FLenY: integer; + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + constructor Create; overload; override; + constructor Create(StartX, StartY, LenX, LenY: integer); reintroduce; overload; + procedure Compute(); override; + procedure Backpropagate(); override; + end; + /// Base class to be used with layers that aren't compatible with L2 TNNetIdentityWithoutL2 = class(TNNetIdentity) private @@ -489,6 +527,13 @@ TNNetReLU = class(TNNetReLUBase) procedure Compute(); override; end; + /// This is almost the same as ReLU except that it doesn't + // backpropagate on zero values (Positive only) + TNNetReLUP = class(TNNetReLUBase) + public + procedure Compute(); override; + end; + /// This is a leaky ReLU with minimum and maximum values. You can // scale leakiness via the Leaky parameter. TNNetReLUL = class(TNNetReLUBase) @@ -593,7 +638,7 @@ TNNetHyperbolicTangent = class(TNNetSigmoid) // learning but can also provoke overflows. TNNetMulLearning = class(TNNetIdentity) public - constructor Create(pMul: integer); reintroduce; overload; + constructor Create(pMul: TNeuralFloat); reintroduce; overload; procedure Backpropagate(); override; end; @@ -617,6 +662,61 @@ TNNetAddAndDiv = class(TNNetIdentity) procedure Compute(); override; end; + { TNNetAddPositionalEmbedding } + // Adds positional embedding as per paper "Attention Is All You Need". + // https://arxiv.org/abs/1706.03762 . 
+ TNNetAddPositionalEmbedding = class(TNNetIdentity) + private + FPositionalEmbedding: TNNetVolume; + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + constructor Create; overload; override; + constructor Create(n: integer); reintroduce; overload; + destructor Destroy(); override; + + procedure Compute(); override; + end; + + { TNNetEmbedding } + // Do not use this layer. It's under construction. + TNNetEmbedding = class(TNNetLayer) + private + FVocabSize: integer; + FEmbeddingSize: integer; + FScaleEmbedding: TNeuralFloat; + FEncodeZero: boolean; + FInputTokens: array of integer; + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + constructor Create; overload; override; + constructor Create(pVocabSize, pEmbeddingSize: integer; + EncodeZero: integer = 0; ScaleEmbedding: TNeuralFloat = 2); reintroduce; overload; + destructor Destroy; override; + + procedure InitDefault(); override; + procedure Compute(); override; + procedure Backpropagate(); override; + end; + + { TNNetTokenAndPositionalEmbedding } + // Do not use this layer. It's under construction. 
+ TNNetTokenAndPositionalEmbedding = class(TNNetEmbedding) + private + FPositionalEmbedding: TNNetVolume; + FPositionalEmbeddingN: integer; + FScalePositional: TNeuralFloat; + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + constructor Create(pVocabSize, pEmbeddingSize: integer; + EncodeZero: integer = 0; + ScaleEmbedding: TNeuralFloat = 2; + ScalePositional: TNeuralFloat = 1; + PositionalEmbeddingN: integer = 0); + destructor Destroy; override; + + procedure Compute(); override; + end; + TNNetAddNoiseBase = class(TNNetIdentity) protected FEnabled: boolean; @@ -624,9 +724,21 @@ TNNetAddNoiseBase = class(TNNetIdentity) property Enabled:boolean read FEnabled write FEnabled; end; + TNNetAddNoiseBaseRnd = class(TNNetAddNoiseBase) + private + {$IFDEF THREADSAVERANDOM} + fRandEng : TRandomGenerator; + {$ENDIF} + protected + function Random(range : integer) : integer; + public + constructor Create; override; + destructor Destroy; override; + end; + /// Dropout layer. The input parameter is the dropout rate (rate of values // that are zeroed). - TNNetDropout = class(TNNetAddNoiseBase) + TNNetDropout = class(TNNetAddNoiseBaseRnd) protected FRate: integer; FDropoutMask: TNNetVolume; @@ -646,7 +758,7 @@ TNNetDropout = class(TNNetAddNoiseBase) // randomly. Parameter 10 means changes with up to 1%. Parameter 1 // means 0.1% and 0 means no change. This layer was create to prevent // overfitting and force generalization. - TNNetRandomMulAdd = class(TNNetAddNoiseBase) + TNNetRandomMulAdd = class(TNNetAddNoiseBaseRnd) protected FRandomBias, FRandomMul: TNeuralFloat; public @@ -657,7 +769,7 @@ TNNetRandomMulAdd = class(TNNetAddNoiseBase) /// This layers adds a small random bias (shift) and small // random multiplication (scaling). 
- TNNetChannelRandomMulAdd = class(TNNetAddNoiseBase) + TNNetChannelRandomMulAdd = class(TNNetAddNoiseBaseRnd) protected FRandomBias, FRandomMul: TNNetVolume; public @@ -699,8 +811,21 @@ TNNetMovingStdNormalization = class(TNNetIdentityWithoutL2) function GetMaxAbsoluteDelta(): TNeuralFloat; override; end; + // This layer is experimental. Do not use. + TNNetMovingScale = class(TNNetIdentityWithoutL2) + private + FChangeRate: TNeuralFloat; + FMaxTarget: TNeuralFloat; + public + constructor Create; overload; override; + constructor Create(pMaxTarget: TNeuralFloat; pChangeRate: TNeuralFloat); reintroduce; overload; + procedure Compute(); override; + procedure Backpropagate(); override; + procedure InitDefault(); override; + end; + // This is an experimental layer. Do not use it. - TNNetScaleLearning = class(TNNetMovingStdNormalization) + TNNetScaleLearning = class(TNNetIdentity) public procedure Compute(); override; procedure Backpropagate(); override; @@ -909,6 +1034,27 @@ TNNetSum = class(TNNetConcatBase) procedure Backpropagate(); override; end; + /// This layer is under construction. DO NOT USE IT. + // This layer run the TNNetVolume.DotProducts for layers A and B. + TNNetDotProducts = class(TNNetLayer) + private + FA: TNNetLayer; // Layer A + FB: TNNetLayer; // Layer B + FAError: TNNetVolume; // Layer A: Error + FBError: TNNetVolume; // Layer B: Error + FAT: TNNetVolume; // Layer A: Output Transposed + FBT: TNNetVolume; // Layer B: Output Transposed + FET: TNNetVolume; // Error Transposed + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + public + constructor Create(A, B: TNNetLayer); reintroduce; overload; + constructor Create(AIdx, BIdx: integer); reintroduce; overload; + destructor Destroy(); override; + + procedure Compute(); override; + procedure Backpropagate(); override; + end; + /// picks/splits from previous layer selected channels. 
TNNetSplitChannels = class(TNNetLayer) private @@ -957,8 +1103,7 @@ TNNetFullConnect = class(TNNetLayerConcatedWeights) //FullyConnectedLayers TNNetFullConnectClass = class of TNNetFullConnect; - /// Fully connected layer without activation function. This layer is useful - // before softmax layers. + /// Fully connected layer without activation function. TNNetFullConnectLinear = class(TNNetFullConnect) private procedure ComputePreviousLayerErrorCPU(); override; @@ -977,6 +1122,7 @@ TNNetFullConnectSigmoid = class(TNNetFullConnect) end; /// Fully connected layer with ReLU. + // This layer is useful before softmax layers. TNNetFullConnectReLU = class(TNNetFullConnectLinear) private procedure ComputePreviousLayerErrorCPU(); override; @@ -999,8 +1145,22 @@ TNNetFullConnectDiff = class(TNNetFullConnectReLU) procedure Backpropagate(); override; end; + // Pointwise softmax operation. + TNNetPointwiseSoftMax = class(TNNetIdentity) + protected + FSkipBackpropDerivative: boolean; + public + // Although skipping the derivative calculation is a non standard usage, + // skipping the derivative can give higher classification accuracy at + // image classification tasks with 10x smaller learning rate. + constructor Create; overload; override; + constructor Create(SkipBackpropDerivative: integer); reintroduce; overload; + procedure Compute(); override; + procedure Backpropagate(); override; + end; + /// Common softmax layer. - TNNetSoftMax = class(TNNetIdentity) + TNNetSoftMax = class(TNNetPointwiseSoftMax) protected FSoftTotalSum: TNeuralFloat; public @@ -1106,8 +1266,13 @@ TNNetConvolutionClass = class of TNNetConvolutionBase; /// This layer is under construction. DO NOT USE IT. 
TNNetGroupedConvolutionLinear = class(TNNetConvolutionBase) private - FArrGroupId: array of integer; - FArrGroupIdStart: array of integer; + FArrPrevLayerGroupId: array of integer; + FArrPrevLayerGroupIdStart: array of integer; + FOutputGroupId: array of integer; + FOutputGroupIdStart: array of integer; + FGroupIdToPrevLayerIdStart: array of integer; + FGroupIdToOutputIdStart: array of integer; + FMaxPrevX, FMaxPrevY: integer; procedure PrepareInputForGroupedConvolutionFast(); procedure ComputeCPU(); @@ -1309,6 +1474,11 @@ TNNetPoolBase = class(TNNetLayer) procedure BackpropagateDefaultStride(); procedure BackpropagateWithStride(); procedure ComputePreviousLayerError(); override; + protected + {$IFDEF THREADSAVERANDOM} + fRandEng : TRandomGenerator; + function Random(range : integer) : integer; + {$ENDIF} public constructor Create(pPoolSize: integer; pStride:integer = 0; pPadding: integer = 0); reintroduce; overload; virtual; destructor Destroy(); override; @@ -1318,12 +1488,31 @@ TNNetPoolBase = class(TNNetLayer) /// DEFAULT CAI maxpool layer. TNNetMaxPool = class(TNNetPoolBase) private - procedure ComputeDefaultStride(); - procedure ComputeWithStride(); + procedure ComputeDefaultStride(); virtual; + procedure ComputeWithStride(); virtual; public procedure Compute(); override; end; + // This layer implements a maxpool that also stores the position + // of the maximum values. 
+ TNNetMaxPoolWithPosition = class(TNNetMaxPool) + private + FLogPosX, FLogPosY: boolean; + FExtraSize: integer; + FPosX, FPosY: array of TNeuralFloat; + FMaxBackpropX: integer; + procedure SetPrevLayer(pPrevLayer: TNNetLayer); override; + procedure ComputeDefaultStride(); override; + procedure ComputeWithStride(); override; + procedure ComputePositions(); + public + constructor Create(pPoolSize: integer; pStride: integer; pPadding: integer; + pLogPosX: integer; pLogPosY: integer; pMaxBackpropX: integer = 0); + destructor Destroy(); override; + procedure Backpropagate(); override; + end; + /// PORTABLE maxpool layer (similar to other APIs) TNNetMaxPoolPortable = class(TNNetMaxPool) private @@ -1445,6 +1634,8 @@ TNNet = class(TMObject) Groups, pNumFeatures, pFeatureSize, pInputPadding, pStride: integer; pSuppressBias: integer = 0; ChannelInterleaving: boolean = True): TNNetLayer; + function AddGroupedDotProducts(A,B: TNNetLayer; Groups: integer; ChannelInterleaving: boolean): TNNetLayer; + function AddGroupedPointwiseSoftMax(Groups: integer; ChannelInterleaving: boolean): TNNetLayer; /// AddAutoGroupedPointwiseConv implements // pointwise convolutions of the kEffNet architecture // described on the paper: "Grouped Pointwise Convolutions Significantly @@ -1503,8 +1694,16 @@ TNNet = class(TMObject) function AddAvgMaxPool(pPoolSize: integer; pMaxPoolDropout: TNeuralFloat = 0; pKeepDepth:boolean = false; pAfterLayer: TNNetLayer = nil): TNNetLayer; function AddMinMaxChannel(pAfterLayer: TNNetLayer = nil): TNNetLayer; function AddAvgMaxChannel(pMaxPoolDropout: TNeuralFloat = 0; pKeepDepth:boolean = false; pAfterLayer: TNNetLayer = nil): TNNetLayer; + // Transformers, AddSingleHeadSelfAttention and AddSingleHeadTransformerBlock are under construction - do not use it + procedure AddSingleHeadSelfAttention(out Attended, W: TNNetLayer); + function AddSelfAttention(Heads: integer): TNNetLayer; + function AddSelfAttentionCAI(Heads: integer): TNNetLayer; + procedure 
AddSingleHeadTransformerBlock(out Result, W: TNNetLayer; HasNorm: boolean = False); + function AddTransformerBlock(Heads: integer; IntermediateDim: integer; HasNorm: boolean = False): TNNetLayer; + function AddTransformerBlockCAI(Heads: integer; IntermediateDim: integer; HasNorm: boolean = False): TNNetLayer; procedure AddToExponentialWeightAverage(NewElement: TNNet; Decay: TNeuralFloat); procedure AddToWeightAverage(NewElement: TNNet; CurrentElementCount: integer); + function GetFirstLayer(): TNNetLayer; // Returns the layer index of the first neuronal layer (layers that have neurons). function GetFirstNeuronalLayerIdx(FromLayerIdx:integer = 0): integer; {$IFDEF Release} inline; {$ENDIF} // Returns the layer index of the first neuronal layer that can process an image as input. @@ -1556,6 +1755,8 @@ TNNet = class(TMObject) procedure SetBatchUpdate(pBatchUpdate: boolean); {$IFDEF Release} inline; {$ENDIF} procedure InitWeights(); procedure UpdateWeights(); {$IFDEF Release} inline; {$ENDIF} + procedure CalcAdamDelta(); + procedure UpdateWeightsAdam(); {$IFDEF Release} inline; {$ENDIF} procedure ClearDeltas(); {$IFDEF Release} inline; {$ENDIF} procedure ResetBackpropCallCurrCnt(); {$IFDEF Release} inline; {$ENDIF} procedure SetL2Decay(pL2Decay: TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} @@ -1579,6 +1780,9 @@ TNNet = class(TMObject) function ForceMaxAbsoluteWeight(vMax: TNeuralFloat): TNeuralFloat; {$IFDEF Release} inline; {$ENDIF} function GetMaxAbsoluteDelta(): TNeuralFloat; function NormalizeMaxAbsoluteDelta(NewMax: TNeuralFloat = 0.1): TNeuralFloat; + function NormalizeMinAbsoluteDeltaPerLayer(MinDelta: TNeuralFloat = 0.001): TNeuralFloat; + function NormalizeMinMaxAbsoluteDeltaPerLayer(MinDelta, MaxDelta: TNeuralFloat): TNeuralFloat; + procedure NormalizeMaxAbsoluteDeltaPerNeuron(MaxDelta: TNeuralFloat); procedure ClearInertia(); {$IFDEF Release} inline; {$ENDIF} procedure ClearBias(); {$IFDEF Release} inline; {$ENDIF} @@ -1764,7 +1968,7 @@ 
TNNetDataParallelism = class (TNNetList) {$IFDEF OpenCL} procedure DisableOpenCL(); - procedure EnableOpenCL(platform_id: cl_platform_id; device_id: cl_device_id); + procedure EnableOpenCL(platform_id: cl_platform_id; device_id: cl_device_id; maxNumOpenCLThreads : integer = -1); {$ENDIF} end; @@ -1849,145 +2053,906 @@ TEasyBytePredictionViaNNet = class(TBytePredictionViaNNet) destructor Destroy(); override; end; - procedure CompareComputing(NN1, NN2: TNNet); - procedure CompareNNStructure(NN, NN2: TNNet); - procedure TestConvolutionAPI(); - procedure TestDataParallelism(NN: TNNet); - - {$IFDEF OpenCL} - procedure TestConvolutionOpenCL(platform_id: cl_platform_id; device_id: cl_device_id); - procedure TestFullConnectOpenCL(platform_id: cl_platform_id; device_id: cl_device_id); - {$ENDIF} - - procedure RebuildPatternOnPreviousPatterns - ( - Calculated: TNNetVolume; - LocalWeight: TNNetVolume; - PrevLayer: TNNetNeuronList; - PrevStride: integer; - ReLU: boolean = false; - Threshold: TNeuralFloat = 0.5 - ); + procedure CompareComputing(NN1, NN2: TNNet); + procedure CompareNNStructure(NN, NN2: TNNet); + procedure TestConvolutionAPI(); + procedure TestDataParallelism(NN: TNNet); + + {$IFDEF OpenCL} + procedure TestConvolutionOpenCL(platform_id: cl_platform_id; device_id: cl_device_id); + procedure TestFullConnectOpenCL(platform_id: cl_platform_id; device_id: cl_device_id); + {$ENDIF} + + procedure RebuildPatternOnPreviousPatterns + ( + Calculated: TNNetVolume; + LocalWeight: TNNetVolume; + PrevLayer: TNNetNeuronList; + PrevStride: integer; + ReLU: boolean = false; + Threshold: TNeuralFloat = 0.5 + ); + + procedure RebuildNeuronListOnPreviousPatterns + ( + CalculatedLayer: TNNetNeuronList; + CurrentLayer, PrevLayer: TNNetNeuronList; + PrevStride: integer; + ReLU: boolean = false; + Threshold: TNeuralFloat = 0.5 + ); + + // Simple character based NLP function for building a string from characters. 
+ function GenerateStringFromChars(NN: TNNet; InputString: string; oSampler: TNNetSamplerBase = nil): string; overload; + + // Takes a neural network (NN) and an input string, and returns the predicted class as an integer. + function GetClassFromChars(NN: TNNet; InputString: string): integer; + + function GenerateStringFromTokens(NN: TNNet; Dict:TStringListInt; InputString: string; oSampler: TNNetSamplerBase = nil): string; + +implementation + +procedure RebuildPatternOnPreviousPatterns +( + Calculated: TNNetVolume; + LocalWeight: TNNetVolume; + PrevLayer: TNNetNeuronList; + PrevStride: integer; + ReLU: boolean = false; + Threshold: TNeuralFloat = 0.5 +); +var + SizeX, SizeY, Depth: integer; + LocalMaxX, LocalMaxY, LocalMaxD: integer; + LocalCntX, LocalCntY, NeuronIdx: integer; + LocalMultiplier: TNeuralFloat; + PrevMaxX, PrevMaxY, PrevMaxD: integer; + PrevCntX, PrevCntY, PrevCntD: integer; + PrevWeight: TNNetVolume; + PrevWeightValue: TNeuralFloat; + MinWeightAbs: TNeuralFloat; +begin + Depth := PrevLayer[0].Weights.Depth; + SizeX := + PrevLayer[0].Weights.SizeX + + ((LocalWeight.SizeX - 1) * PrevStride); + SizeY := + PrevLayer[0].Weights.SizeY + + ((LocalWeight.SizeY - 1) * PrevStride); + if PrevLayer.Count <> LocalWeight.Depth then + begin + exit; + end; + Calculated.ReSize(SizeX, SizeY, Depth); + Calculated.Fill(0); + LocalMaxX := LocalWeight.SizeX - 1; + LocalMaxY := LocalWeight.SizeY - 1; + LocalMaxD := LocalWeight.Depth - 1; + MinWeightAbs := LocalWeight.GetMaxAbs() * Threshold; + // For each current weight + for LocalCntX := 0 to LocalMaxX do + begin + for LocalCntY := 0 to LocalMaxY do + begin + for NeuronIdx := 0 to LocalMaxD do + begin + LocalMultiplier := LocalWeight[LocalCntX, LocalCntY, NeuronIdx]; + if MinWeightAbs <= Abs(LocalMultiplier) then + begin + // Multiply corresponding weight and add to proper position. 
+ PrevWeight := PrevLayer[NeuronIdx].Weights; + PrevMaxX := PrevWeight.SizeX - 1; + PrevMaxY := PrevWeight.SizeY - 1; + PrevMaxD := PrevWeight.Depth - 1; + for PrevCntX := 0 to PrevMaxX do + begin + for PrevCntY := 0 to PrevMaxY do + begin + for PrevCntD := 0 to PrevMaxD do + begin + PrevWeightValue := PrevWeight[PrevCntX, PrevCntY, PrevCntD]; + if (PrevWeightValue > 0) or Not(ReLU) then + Calculated.Add + ( + (LocalCntX * PrevStride) + PrevCntX, + (LocalCntY * PrevStride) + PrevCntY, + PrevCntD, + LocalMultiplier * PrevWeightValue + ); + end; + end; + end; // PrevCntX + end; //if LocalMultiplier > 0 + end; + end; + end; // LocalCntX +end; + +procedure RebuildNeuronListOnPreviousPatterns +( + CalculatedLayer: TNNetNeuronList; + CurrentLayer, PrevLayer: TNNetNeuronList; + PrevStride: integer; + ReLU: boolean = false; + Threshold: TNeuralFloat = 0.5 +); +var + NeuronCnt: integer; +begin + if CurrentLayer.Count <> CalculatedLayer.Count then + begin + WriteLn( + 'Sizes differ. Current layer: ', CurrentLayer.Count, + ' Calc layer: ', CalculatedLayer.Count + ); + exit; + end; + + for NeuronCnt := 0 to CurrentLayer.Count - 1 do + begin + RebuildPatternOnPreviousPatterns + ( + {Calculated=}CalculatedLayer[NeuronCnt].Weights, + {LocalWeight=}CurrentLayer[NeuronCnt].Weights, + {PrevLayer=}PrevLayer, + {PrevStride=}PrevStride, + {ReLU=}ReLU, + {Threshold=}Threshold + ); + end; +end; + +function GenerateStringFromChars(NN: TNNet; InputString: string; + oSampler: TNNetSamplerBase): string; +var + InputVolume, OutputVolume: TNNetVolume; + NextTokenInt: integer; + NextTokenChar: char; + AB: array [0..0] of byte; +begin + InputVolume := TNNetVolume.Create(NN.GetFirstLayer.Output); + OutputVolume := TNNetVolume.Create(NN.GetLastLayer().Output); + repeat + InputVolume.OneHotEncodingReversed(InputString); + NN.Compute(InputVolume, OutputVolume); + if (OutputVolume.Size = 8) then + begin + OutputVolume.ReadAsBits(AB, 0.5); + NextTokenInt := AB[0]; + end + else + begin + if 
Assigned(oSampler) + then NextTokenInt := oSampler.GetToken(OutputVolume) + else NextTokenInt := OutputVolume.GetClass(); + end; + NextTokenChar := Char(NextTokenInt); + if NextTokenInt > 1 then InputString := InputString + NextTokenChar; + until (NextTokenInt < 2) or (Length(InputString)>=InputVolume.SizeX); + Result := InputString; + InputVolume.Free; + OutputVolume.Free; +end; + +// Takes a neural network (NN) and an input string, +// and returns the predicted class as an integer. +function GetClassFromChars(NN: TNNet; InputString: string): integer; +var + InputVolume: TNNetVolume; // Declare a variable for the input volume. +begin + // Create a new TNNetVolume based on the output size of the first layer of the neural network. + InputVolume := TNNetVolume.Create(NN.GetFirstLayer.Output); + + // Convert the input string into a one-hot encoded volume, which is the format + // expected by the neural network for processing. + InputVolume.OneHotEncodingReversed(InputString); + + // Run the forward pass of the neural network with the one-hot encoded input. + NN.Compute(InputVolume); + + // After the network has computed the output, retrieve the class with the highest + // probability from the last layer's output. + Result := NN.GetLastLayer().Output.GetClass(); + + // Release the memory allocated for the input volume to prevent memory leaks. 
+ InputVolume.Free; +end; + +function GenerateStringFromTokens(NN: TNNet; Dict: TStringListInt; + InputString: string; oSampler: TNNetSamplerBase): string; +var + InputVolume, OutputVolume: TNNetVolume; + NextTokenInt: integer; + NextTokenStr: string; + Tokens: TNeuralIntegerArray; + TokenCnt: integer; +begin + InputVolume := TNNetVolume.Create(NN.GetFirstLayer.Output); + OutputVolume := TNNetVolume.Create(NN.GetLastLayer().Output); + Result := InputString; + Dict.StringToIntegerArray(InputString, Tokens); + TokenCnt := Length(Tokens); + repeat + InputVolume.CopyReversedNoChecksIntArr(Tokens); + NN.Compute(InputVolume, OutputVolume); + if Assigned(oSampler) + then NextTokenInt := oSampler.GetToken(OutputVolume) + else NextTokenInt := OutputVolume.GetClass(); + if NextTokenInt < Dict.Count then + begin + NextTokenStr := Dict.IntegerToWord(NextTokenInt); + Result := Result + ' ' + NextTokenStr; + end; + TokenCnt := TokenCnt + 1; + SetLength(Tokens, TokenCnt); + Tokens[TokenCnt - 1] := NextTokenInt; + until (NextTokenInt < 2) or (TokenCnt>=InputVolume.SizeX); + SetLength(Tokens, 0); + InputVolume.Free; + OutputVolume.Free; +end; + +{ TNNetMovingScale } + +constructor TNNetMovingScale.Create(pMaxTarget: TNeuralFloat; pChangeRate: TNeuralFloat); +begin + inherited Create; + InitDefault(); + FMaxTarget := pMaxTarget; + FChangeRate := pChangeRate; + FFloatSt[0] := pMaxTarget; + FFloatSt[1] := pChangeRate; +end; + +procedure TNNetMovingScale.Compute; +var + StartTime: double; + Multiplier: TNeuralFloat; +begin + StartTime := Now(); + inherited Compute; + Multiplier := FNeurons[0].FWeights.FData[0]; + if Multiplier <= 0.001 then + begin + Multiplier := 0.001; + end; + if (Multiplier<>1) then + begin + FOutput.Mul(Multiplier); + end; + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +constructor TNNetMovingScale.Create; +begin + inherited; + InitDefault(); + FMaxTarget := 1; + FChangeRate := 1; + FFloatSt[0] := FMaxTarget; + FFloatSt[1] := FChangeRate; +end; + 
+procedure TNNetMovingScale.Backpropagate; +var + StartTime: double; + MaxAbs: TNeuralFloat; + Multiplier, Diff: TNeuralFloat; +begin + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + StartTime := Now(); + Multiplier := FNeurons[0].FWeights.FData[0]; + if Multiplier <= 0.001 then + begin + Multiplier := 0.001; + end; + MaxAbs := FOutput.GetMaxAbs(); + if MaxAbs <> 0 then + begin + Diff := FMaxTarget-MaxAbs; + if (Diff < 0) or (Diff>0.75) then + begin + FNeurons[0].FDelta.Add(0, 0, 0, (Diff)*FLearningRate*FChangeRate); + if (not FBatchUpdate) then + begin + FNeurons[0].UpdateWeights(FInertia); + AfterWeightUpdate(); + end; + end; + end; + if (Multiplier > 0) and (Multiplier <> 1) then + begin + FOutputError.Mul(Multiplier); + //if Random(100)=0 then WriteLn(MaxAbs,' ->', Multiplier:10:8); + end; + FBackwardTime := FBackwardTime + (Now() - StartTime); + inherited Backpropagate(); +end; + +procedure TNNetMovingScale.InitDefault; +begin + if FNeurons.Count < 1 then AddMissingNeurons(1); + SetNumWeightsForAllNeurons(1, 1, 1); + FNeurons[0].FWeights.FData[0] := 1; +end; + +{ TNNetDotProducts } + +procedure TNNetDotProducts.SetPrevLayer(pPrevLayer: TNNetLayer); +begin + inherited SetPrevLayer(pPrevLayer); + FA := pPrevLayer.NN.Layers[ FStruct[0] ]; + FB := pPrevLayer.NN.Layers[ FStruct[1] ]; + if FA.Output.Depth <> FB.Output.Depth then + begin + FErrorProc( + 'TNNetDotProducts - Depths differ '+ + IntToStr(FA.Output.Depth) + ' ' + + IntToStr(FB.Output.Depth) + '.' 
+ ); + end; + FOutput.ReSize( + FB.Output.SizeX, + FB.Output.SizeY, + FA.Output.SizeX * FA.Output.SizeY + ); + FOutputError.Resize(FOutput); + FOutputErrorDeriv.Resize(FOutput); +end; + +constructor TNNetDotProducts.Create(A, B: TNNetLayer); +begin + Self.Create(A.LayerIdx, B.LayerIdx); +end; + +constructor TNNetDotProducts.Create(AIdx, BIdx: integer); +begin + inherited Create; + FAT := TNNetVolume.Create(); + FBT := TNNetVolume.Create(); + FET := TNNetVolume.Create(); + FAError := TNNetVolume.Create(); + FBError := TNNetVolume.Create(); + + FStruct[0] := AIdx; + FStruct[1] := BIdx; +end; + +destructor TNNetDotProducts.Destroy; +begin + FAError.Free; + FBError.Free; + FET.Free; + FBT.Free; + FAT.Free; + inherited Destroy; +end; + +procedure TNNetDotProducts.Compute; +var + StartTime: double; +begin + StartTime := Now(); + FOutput.DotProductsPointwise(FA.Output, FB.Output); + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +procedure TNNetDotProducts.Backpropagate; +var + StartTime: double; +begin + StartTime := Now(); + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + + FAT.CopyTransposingAs2D(FA.Output); + FBT.CopyTransposingAs2D(FB.Output); + FET.CopyTransposingAs2D(FOutputError); + + FAError.DotProductsPointwise(FBT, FET); + FBError.DotProductsPointwise(FAT, FOutputError); + + FA.OutputError.Add(FAError); + FB.OutputError.Add(FBError); + + FBackwardTime := FBackwardTime + (Now() - StartTime); + FB.Backpropagate(); + FA.Backpropagate(); +end; + +{ TNNetPointwiseSoftMax } +constructor TNNetPointwiseSoftMax.Create(SkipBackpropDerivative: integer); +begin + inherited Create(); + FSkipBackpropDerivative := (SkipBackpropDerivative > 0); + FStruct[0] := SkipBackpropDerivative; +end; + +procedure TNNetPointwiseSoftMax.Compute; +var + StartTime: double; +begin + StartTime := Now(); + inherited Compute; + FOutput.PointwiseSoftMax(); + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +constructor 
TNNetPointwiseSoftMax.Create; +begin + inherited; + // default = 0 + FSkipBackpropDerivative := False; + FStruct[0] := 0; +end; + +procedure TNNetPointwiseSoftMax.Backpropagate; +var + StartTime: double; + {$IFDEF Debug} + Min, Max: TNeuralFloat; + {$ENDIF} +begin + StartTime := Now(); + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + if Assigned(FPrevLayer) and + (FPrevLayer.OutputError.Size > 0) and + (FPrevLayer.OutputError.Size = FPrevLayer.Output.Size) then + begin + if FSkipBackpropDerivative then + begin + FPrevLayer.OutputError.Add(FOutputError); + end + else + begin + // derivative is: x*(1-x) + // https://eli.thegreenplace.net/2016/the-softmax-function-and-its-derivative/ + // https://github.com/neuroph/neuroph/blob/master/neuroph-2.9/Contrib/src/main/java/org/neuroph/contrib/learning/SoftMax.java + FOutputErrorDeriv.Fill(1); + FOutputErrorDeriv.Sub(FOutput); + FOutputErrorDeriv.Mul(FOutput); + FPrevLayer.OutputError.MulAdd(FOutputError, FOutputErrorDeriv); + end; + end; + {$IFDEF Debug} + Min := FOutputErrorDeriv.GetMin(); + Max := FOutputErrorDeriv.GetMax(); + if Min < 0 then FErrorProc('Softmax derivative is negative: '+FloatToStrF(Min,ffFixed,6,4)); + if Max > 0.25 then FErrorProc('Softmax derivative is bigger than 0.25: '+FloatToStrF(Max,ffFixed,6,4)); + {$ENDIF} + FBackwardTime := FBackwardTime + (Now() - StartTime); + FPrevLayer.Backpropagate(); +end; + +{ TNNetAddPositionalEmbedding } +procedure TNNetAddPositionalEmbedding.SetPrevLayer(pPrevLayer: TNNetLayer); +begin + inherited SetPrevLayer(pPrevLayer); + FPositionalEmbedding.ReSize(FOutput); + FPositionalEmbedding.PositionalEncoding(FStruct[0]); +end; + +constructor TNNetAddPositionalEmbedding.Create(n: integer); +begin + inherited Create; + FPositionalEmbedding := TNNetVolume.Create; + if n=0 + then FStruct[0] := 10000 + else FStruct[0] := n; +end; + +constructor TNNetAddPositionalEmbedding.Create; +begin + inherited Create; + FPositionalEmbedding 
:= TNNetVolume.Create; + FStruct[0] := 10000 +end; + +destructor TNNetAddPositionalEmbedding.Destroy; +begin + FPositionalEmbedding.Free; + inherited Destroy; +end; + +procedure TNNetAddPositionalEmbedding.Compute; +begin + inherited Compute; + FOutput.Add(FPositionalEmbedding); +end; + +{ TNNetTransposeYD } + +procedure TNNetTransposeYD.SetPrevLayer(pPrevLayer: TNNetLayer); +begin + inherited SetPrevLayer(pPrevLayer); + FOutput.ReSize(pPrevLayer.Output.SizeX, pPrevLayer.Output.Depth, pPrevLayer.Output.SizeY); + FOutputError.ReSize(pPrevLayer.OutputError.SizeX, pPrevLayer.OutputError.Depth, pPrevLayer.OutputError.SizeY); + FOutputErrorDeriv.ReSize(pPrevLayer.OutputErrorDeriv.SizeX, pPrevLayer.OutputErrorDeriv.Depth, pPrevLayer.OutputErrorDeriv.SizeY); +end; + +procedure TNNetTransposeYD.Compute; +var + StartTime: double; +begin + StartTime := Now(); + FOutput.CopyTransposingYD(FPrevLayer.FOutput); + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +procedure TNNetTransposeYD.Backpropagate; +var + StartTime: double; +begin + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + if Assigned(FPrevLayer) and + (FPrevLayer.OutputError.Size > 0) and + (FPrevLayer.OutputError.Size = FPrevLayer.Output.Size) then + begin + StartTime := Now(); + FPrevLayer.FOutputError.AddTransposingYD(FOutputError); + FBackwardTime := FBackwardTime + (Now() - StartTime); + end; + FPrevLayer.Backpropagate(); +end; + +{ TNNetTransposeXD } + +procedure TNNetTransposeXD.SetPrevLayer(pPrevLayer: TNNetLayer); +begin + inherited SetPrevLayer(pPrevLayer); + FOutput.ReSize(pPrevLayer.Output.Depth, pPrevLayer.Output.SizeY, pPrevLayer.Output.SizeX); + FOutputError.ReSize(pPrevLayer.OutputError.Depth, pPrevLayer.OutputError.SizeY, pPrevLayer.OutputError.SizeX); + FOutputErrorDeriv.ReSize(pPrevLayer.OutputErrorDeriv.Depth, pPrevLayer.OutputErrorDeriv.SizeY, pPrevLayer.OutputErrorDeriv.SizeX); +end; + +procedure TNNetTransposeXD.Compute; +var + 
StartTime: double; +begin + StartTime := Now(); + FOutput.CopyTransposingXD(FPrevLayer.FOutput); + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +procedure TNNetTransposeXD.Backpropagate; +begin + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + if Assigned(FPrevLayer) and + (FPrevLayer.OutputError.Size > 0) and + (FPrevLayer.OutputError.Size = FPrevLayer.Output.Size) then + begin + FPrevLayer.FOutputError.AddTransposingXD(FOutputError); + end; + FPrevLayer.Backpropagate(); +end; + +{ TNNetReLUP } + +procedure TNNetReLUP.Compute; +var + SizeM1: integer; + LocalPrevOutput: TNNetVolume; + OutputCnt: integer; + StartTime: double; +begin + StartTime := Now(); + LocalPrevOutput := FPrevLayer.Output; + SizeM1 := LocalPrevOutput.Size - 1; + + if (FOutput.Size = FOutputError.Size) and (FOutputErrorDeriv.Size = FOutput.Size) then + begin + for OutputCnt := 0 to SizeM1 do + begin + if LocalPrevOutput.FData[OutputCnt] > 0 then // Positive Values Only + begin + FOutput.FData[OutputCnt] := LocalPrevOutput.FData[OutputCnt]; + FOutputErrorDeriv.FData[OutputCnt] := 1; + end + else + begin + FOutput.FData[OutputCnt] := 0; + FOutputErrorDeriv.FData[OutputCnt] := 0; + end; + end; + end + else + begin + // can't calculate error on input layers. 
+ for OutputCnt := 0 to SizeM1 do + begin + if LocalPrevOutput.FData[OutputCnt]>0 then + begin + FOutput.FData[OutputCnt] := LocalPrevOutput.FData[OutputCnt]; + end + else + begin + FOutput.FData[OutputCnt] := 0; + end; + end; + end; + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +{ TNNetMaxPoolWithPosition } + +procedure TNNetMaxPoolWithPosition.ComputeDefaultStride; +begin + inherited ComputeDefaultStride(); + ComputePositions(); +end; + +procedure TNNetMaxPoolWithPosition.ComputeWithStride; +begin + inherited ComputeWithStride(); + ComputePositions(); +end; + +procedure TNNetMaxPoolWithPosition.ComputePositions; +var + CntOutputX, CntOutputY, CntD: integer; + OutputMaxX, OutputMaxY, MaxD: integer; + OutputRawPos, PosX, PosY: integer; + PrevDepth: integer; + PositionBlockCnt: integer; +begin + OutputMaxX := Output.SizeX - 1; + OutputMaxY := Output.SizeY - 1; + PrevDepth := FPrevLayer.Output.Depth; + MaxD := PrevDepth - 1; + for CntOutputY := 0 to OutputMaxY do + begin + for CntOutputX := 0 to OutputMaxX do + begin + OutputRawPos := Output.GetRawPos(CntOutputX, CntOutputY); + for CntD := 0 to MaxD do + begin + PosX := FMaxPosX[OutputRawPos]; // Position X + PosY := FMaxPosY[OutputRawPos]; // Position Y + PositionBlockCnt := 0; + if FLogPosX then + begin + Inc(PositionBlockCnt); + FOutput.FData[OutputRawPos + PrevDepth*PositionBlockCnt] := FPosX[PosX]; + end; + if FLogPosY then + begin + Inc(PositionBlockCnt); + FOutput.FData[OutputRawPos + PrevDepth*PositionBlockCnt] := FPosY[PosY]; + end; + Inc(OutputRawPos); + end; + end; + end; +end; + +constructor TNNetMaxPoolWithPosition.Create(pPoolSize: integer; + pStride: integer; pPadding: integer; + pLogPosX: integer; pLogPosY: integer; + pMaxBackpropX: integer = 0); +begin + inherited Create(pPoolSize, pStride, pPadding); + FStruct[3] := pLogPosX; + FStruct[4] := pLogPosY; + FStruct[5] := pMaxBackpropX; + FLogPosX := (pLogPosX>0); + FLogPosY := (pLogPosY>0); + FExtraSize := 0; + FMaxBackpropX := 
pMaxBackpropX; + if FLogPosX then Inc(FExtraSize); + if FLogPosY then Inc(FExtraSize); +end; + +destructor TNNetMaxPoolWithPosition.Destroy; +begin + SetLength(FPosX, 0); + SetLength(FPosY, 0); + inherited Destroy; +end; + +procedure TNNetMaxPoolWithPosition.Backpropagate; +var + CntOutputX, CntOutputY, CntD: integer; + OutputMaxX, OutputMaxY, MaxD: integer; + OutputRawPos: integer; + PrevDepth: integer; +begin + if FMaxBackpropX < Output.SizeX then + begin + OutputMaxX := Output.SizeX - 1; + OutputMaxY := Output.SizeY - 1; + PrevDepth := FPrevLayer.Output.Depth; + MaxD := PrevDepth - 1; + for CntOutputY := 0 to OutputMaxY do + begin + for CntOutputX := FMaxBackpropX to OutputMaxX do + begin + OutputRawPos := Output.GetRawPos(CntOutputX, CntOutputY); + for CntD := 0 to MaxD do + begin + FOutput.FData[OutputRawPos] := 0; + Inc(OutputRawPos); + end; + end; + end; + end; + inherited Backpropagate; +end; + +procedure TNNetMaxPoolWithPosition.SetPrevLayer(pPrevLayer: TNNetLayer); +var + CntSizeX, CntSizeY: integer; +begin + inherited SetPrevLayer(pPrevLayer); + if FExtraSize > 0 then + begin + FOutput.ReSize(FOutputSizeX, FOutputSizeY, FOutputSizeD * (1+FExtraSize)); + FOutputError.ReSize(FOutputSizeX, FOutputSizeY, FOutputSizeD * (1+FExtraSize)); + FOutputErrorDeriv.ReSize(FOutputSizeX, FOutputSizeY, FOutputSizeD * (1+FExtraSize)); + SetLength(FMaxPosX, FOutput.Size); + SetLength(FMaxPosY, FOutput.Size); + SetLength(FPosX, FPrevLayer.Output.SizeX); + SetLength(FPosY, FPrevLayer.Output.SizeY); + for CntSizeX := 0 to FPrevLayer.Output.SizeX - 1 do + begin + FPosX[CntSizeX] := CntSizeX/FPrevLayer.Output.SizeX; + end; + for CntSizeY := 0 to FPrevLayer.Output.SizeY - 1 do + begin + FPosY[CntSizeY] := CntSizeY/FPrevLayer.Output.SizeY; + end; + if FMaxBackpropX = 0 then + begin + FMaxBackpropX := FOutputSizeX; + end; + FMaxBackpropX := Min(FOutputSizeX, FMaxBackpropX); + FStruct[5] := FMaxBackpropX; + end; +end; + +{ TNNetCrop } + +procedure TNNetCrop.SetPrevLayer(pPrevLayer: 
TNNetLayer); +begin + inherited SetPrevLayer(pPrevLayer); + FOutput.ReSize(FLenX, FLenY, pPrevLayer.FOutput.Depth); + if (pPrevLayer.FOutputError.Size = pPrevLayer.FOutput.Size) then + begin + FOutputError.ReSize(FOutput); + FOutputErrorDeriv.ReSize(FOutput); + end; +end; + +constructor TNNetCrop.Create(StartX, StartY, LenX, LenY: integer); +begin + inherited Create(); + FStartX := StartX; + FStartY := StartY; + FLenX := Max(LenX, 1); + FLenY := Max(LenY, 1); + FStruct[0] := StartX; + FStruct[1] := StartY; + FStruct[2] := FLenX; + FStruct[3] := FLenY; +end; - procedure RebuildNeuronListOnPreviousPatterns - ( - CalculatedLayer: TNNetNeuronList; - CurrentLayer, PrevLayer: TNNetNeuronList; - PrevStride: integer; - ReLU: boolean = false; - Threshold: TNeuralFloat = 0.5 - ); +procedure TNNetCrop.Compute; +var + StartTime: double; +begin + StartTime := Now(); + if + (FPrevLayer.FOutputError.Size = FPrevLayer.FOutput.Size) and + (FOutput.Size <> FOutputError.Size) + then + begin + FOutputError.ReSize(FOutput); + FOutputErrorDeriv.ReSize(FOutput); + end; + FOutput.CopyCropping(FPrevLayer.FOutput, FStartX, FStartY, FLenX, FLenY); + FForwardTime := FForwardTime + (Now() - StartTime); +end; -implementation +constructor TNNetCrop.Create; +begin + raise Exception.Create('Default constructor not allowed here'); + inherited; +end; -procedure RebuildPatternOnPreviousPatterns -( - Calculated: TNNetVolume; - LocalWeight: TNNetVolume; - PrevLayer: TNNetNeuronList; - PrevStride: integer; - ReLU: boolean = false; - Threshold: TNeuralFloat = 0.5 -); +procedure TNNetCrop.Backpropagate; var - SizeX, SizeY, Depth: integer; - LocalMaxX, LocalMaxY, LocalMaxD: integer; - LocalCntX, LocalCntY, NeuronIdx: integer; - LocalMultiplier: TNeuralFloat; - PrevMaxX, PrevMaxY, PrevMaxD: integer; - PrevCntX, PrevCntY, PrevCntD: integer; - PrevWeight: TNNetVolume; - PrevWeightValue: TNeuralFloat; - MinWeightAbs: TNeuralFloat; + StartTime: double; begin - Depth := PrevLayer[0].Weights.Depth; - SizeX := - 
PrevLayer[0].Weights.SizeX + - ((LocalWeight.SizeX - 1) * PrevStride); - SizeY := - PrevLayer[0].Weights.SizeY + - ((LocalWeight.SizeY - 1) * PrevStride); - if PrevLayer.Count <> LocalWeight.Depth then + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + if (FPrevLayer.Output.Size > 0) and (FPrevLayer.Output.Size = FPrevLayer.OutputError.Size) then begin - exit; + StartTime := Now(); + FPrevLayer.FOutputError.AddArea + ( + {DestX=}FStartX, + {DestY=}FStartY, + {OriginX=}0, + {OriginY=}0, + {LenX=}FLenX, + {LenY=}FLenY, + FOutputError + ); + FBackwardTime := FBackwardTime + (Now() - StartTime); end; - Calculated.ReSize(SizeX, SizeY, Depth); - Calculated.Fill(0); - LocalMaxX := LocalWeight.SizeX - 1; - LocalMaxY := LocalWeight.SizeY - 1; - LocalMaxD := LocalWeight.Depth - 1; - MinWeightAbs := LocalWeight.GetMaxAbs() * Threshold; - // For each current weight - for LocalCntX := 0 to LocalMaxX do + if Assigned(FPrevLayer) then FPrevLayer.Backpropagate(); +end; + +{ TNNetPadXY } + +procedure TNNetPadXY.SetPrevLayer(pPrevLayer: TNNetLayer); +begin + inherited SetPrevLayer(pPrevLayer); + FOutput.ReSize(pPrevLayer.FOutput.SizeX + FPaddingX*2, pPrevLayer.FOutput.SizeY + FPaddingY*2, pPrevLayer.FOutput.Depth); + if (pPrevLayer.FOutputError.Size = pPrevLayer.FOutput.Size) then begin - for LocalCntY := 0 to LocalMaxY do - begin - for NeuronIdx := 0 to LocalMaxD do - begin - LocalMultiplier := LocalWeight[LocalCntX, LocalCntY, NeuronIdx]; - if MinWeightAbs <= Abs(LocalMultiplier) then - begin - // Multiply corresponding weight and add to proper position. 
- PrevWeight := PrevLayer[NeuronIdx].Weights; - PrevMaxX := PrevWeight.SizeX - 1; - PrevMaxY := PrevWeight.SizeY - 1; - PrevMaxD := PrevWeight.Depth - 1; - for PrevCntX := 0 to PrevMaxX do - begin - for PrevCntY := 0 to PrevMaxY do - begin - for PrevCntD := 0 to PrevMaxD do - begin - PrevWeightValue := PrevWeight[PrevCntX, PrevCntY, PrevCntD]; - if (PrevWeightValue > 0) or Not(ReLU) then - Calculated.Add - ( - (LocalCntX * PrevStride) + PrevCntX, - (LocalCntY * PrevStride) + PrevCntY, - PrevCntD, - LocalMultiplier * PrevWeightValue - ); - end; - end; - end; // PrevCntX - end; //if LocalMultiplier > 0 - end; - end; - end; // LocalCntX + FOutputError.ReSize(FOutput); + FOutputErrorDeriv.ReSize(FOutput); + end; end; -procedure RebuildNeuronListOnPreviousPatterns -( - CalculatedLayer: TNNetNeuronList; - CurrentLayer, PrevLayer: TNNetNeuronList; - PrevStride: integer; - ReLU: boolean = false; - Threshold: TNeuralFloat = 0.5 -); +constructor TNNetPadXY.Create(PaddingX, PaddingY: integer); +begin + inherited Create(); + FStruct[0] := PaddingX; + FStruct[1] := PaddingY; + FPaddingX := PaddingX; + FPaddingY := PaddingY; +end; + +procedure TNNetPadXY.Compute; var - NeuronCnt: integer; + StartTime: double; begin - if CurrentLayer.Count <> CalculatedLayer.Count then + StartTime := Now(); + if + (FPrevLayer.FOutputError.Size = FPrevLayer.FOutput.Size) and + (FOutput.Size <> FOutputError.Size) + then begin - WriteLn( - 'Sizes differ. 
Current layer: ', CurrentLayer.Count, - ' Calc layer: ', CalculatedLayer.Count - ); - exit; + FOutputError.ReSize(FOutput); + FOutputErrorDeriv.ReSize(FOutput); end; + FOutput.CopyPadding(FPrevLayer.FOutput, FPaddingX, FPaddingY); + FForwardTime := FForwardTime + (Now() - StartTime); +end; - for NeuronCnt := 0 to CurrentLayer.Count - 1 do +constructor TNNetPadXY.Create; +begin + raise Exception.Create('Default constructor not allowed here'); + inherited; +end; + +procedure TNNetPadXY.Backpropagate; +var + StartTime: double; +begin + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + if (FPrevLayer.Output.Size > 0) and (FPrevLayer.Output.Size = FPrevLayer.OutputError.Size) then begin - RebuildPatternOnPreviousPatterns + StartTime := Now(); + FPrevLayer.FOutputError.AddArea ( - {Calculated=}CalculatedLayer[NeuronCnt].Weights, - {LocalWeight=}CurrentLayer[NeuronCnt].Weights, - {PrevLayer=}PrevLayer, - {PrevStride=}PrevStride, - {ReLU=}ReLU, - {Threshold=}Threshold + {DestX=}0, + {DestY=}0, + {OriginX=}FPaddingX, + {OriginY=}FPaddingY, + {LenX=}FPrevLayer.OutputError.SizeX, + {LenY=}FPrevLayer.OutputError.SizeY, + FOutputError ); + FBackwardTime := FBackwardTime + (Now() - StartTime); end; + if Assigned(FPrevLayer) then FPrevLayer.Backpropagate(); end; { TNNetGroupedPointwiseConvHardSwish } @@ -2082,32 +3047,21 @@ constructor TNNetConvolutionSwish.Create(pNumFeatures, pFeatureSize, procedure TNNetScaleLearning.Compute(); begin - FOutput.CopyNoChecks(FPrevLayer.FOutput); + inherited Compute(); end; procedure TNNetScaleLearning.Backpropagate(); var StartTime: double; - MagnitudeDelta: TNeuralFloat; Magnitude: TNeuralFloat; begin Inc(FBackPropCallCurrentCnt); if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; StartTime := Now(); - if FNeurons[0].Weights.FData[1] > 1 then - begin - FOutputError.Mul(FNeurons[0].Weights.FData[1]); - end; - Magnitude := FOutput.GetMagnitude(); - MagnitudeDelta := (1-Magnitude); - if 
(MagnitudeDelta>0) or (FNeurons[0].Weights.FData[1] > 0) then + Magnitude := FOutputError.GetMagnitude(); + if (Magnitude > 0) and (Magnitude < 1) then begin - FNeurons[0].FDelta.Add(0,0,1, NeuronForceRange(MagnitudeDelta, FLearningRate*10) ); - end; - if (not FBatchUpdate) then - begin - FNeurons[0].UpdateWeights(FInertia); - AfterWeightUpdate(); + FOutputError.Mul(1/Magnitude); end; //if Random(100)=0 then WriteLn(MagnitudeDelta:6:4,' - ',FNeurons[0].Weights.FData[1]:6:4); FPrevLayer.FOutputError.Add(FOutputError); @@ -2169,25 +3123,34 @@ constructor TNNetDebug.Create(hasForward, hasBackward: integer); { TNNetDebug } procedure TNNetDebug.Compute(); +var + StartTime: double; begin inherited Compute(); if ((FStruct[0]>0) and (Random(1000)=0)) then begin + StartTime := Now(); Write('Forward:'); FOutput.PrintDebug(); WriteLn; + FForwardTime := FForwardTime + (Now() - StartTime); end; end; procedure TNNetDebug.Backpropagate(); +var + StartTime: double; begin - Inc(FBackPropCallCurrentCnt); - if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + // FBackPropCallCurrentCnt check is inherited done. 
+ //Inc(FBackPropCallCurrentCnt); + //if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; if ((FStruct[1]>0) and (Random(1000)=0)) then begin + StartTime := Now(); Write('Backward:'); FOutputError.PrintDebug(); WriteLn; + FBackwardTime := FBackwardTime + (Now() - StartTime); end; inherited Backpropagate(); end; @@ -2422,7 +3385,9 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); var OutputX, OutputY, OutputD: integer; MaxX, MaxY, MaxD: integer; - (*GroupId, *)GroupDSize, GroupDStart: integer; + OutputGroupDSize: integer; + PrevLayerGroupDSize, PrevLayerGroupDStart: integer; + OutputGroupId: integer; PrevX, PrevY: integer; OutputRawPos: integer; CanBackpropOnPos: boolean; @@ -2453,7 +3418,8 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); MaxD := OutputError.Depth - 1; LocalDestPtr := nil; // Debug code: FOutputError.ForceMaxAbs(1); - GroupDSize := OutputError.Depth div FStruct[5]; + OutputGroupDSize := OutputError.Depth div FStruct[5]; + PrevLayerGroupDSize := FPrevLayer.OutputError.Depth div FStruct[5]; if FPadding > 0 then begin FPrevLayerErrorPadded.Fill(0); @@ -2493,10 +3459,12 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); OutputRawPos := FOutputErrorDeriv.GetRawPos(OutputX, OutputY, StartTileD); for OutputD := StartTileD to EndTileD do begin - //GroupId := FArrGroupId[OutputD]; - GroupDStart := FArrGroupIdStart[OutputD]; + // What is the current group id for OutputD? + OutputGroupId := FOutputGroupId[OutputD]; + // What is the starting point (depth) in the previous layer for this group id? 
+ PrevLayerGroupDStart := FGroupIdToPrevLayerIdStart[OutputGroupId]; if (FCalculatePrevLayerError and CanBackpropOnPos) - then LocalDestPtr := LocalPrevError.GetRawPtr(PrevX, PrevY, GroupDStart); + then LocalDestPtr := LocalPrevError.GetRawPtr(PrevX, PrevY, PrevLayerGroupDStart); {$IFDEF FPC} if FActivationFn = @RectifiedLinearUnit then begin @@ -2529,7 +3497,7 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); LocalLearningErrorDeriv := (-FLearningRate) * LocalOutputErrorDeriv; if (LocalLearningErrorDeriv <> 0.0) then begin - PtrPreparedInput := FInputPrepared.GetRawPtr(OutputX, OutputY, GroupDStart); + PtrPreparedInput := FInputPrepared.GetRawPtr(OutputX, OutputY, PrevLayerGroupDStart); {$IFNDEF AVX64} FArrNeurons[OutputD].Delta.MulAdd(LocalLearningErrorDeriv, PtrPreparedInput); {$ELSE} @@ -2549,7 +3517,7 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); LocalWeight := FArrNeurons[OutputD].Weights; if FPointwise then begin - LocalPrevError.MulAdd(LocalDestPtr, LocalWeight.DataPtr, LocalOutputErrorDeriv, GroupDSize); + LocalPrevError.MulAdd(LocalDestPtr, LocalWeight.DataPtr, LocalOutputErrorDeriv, PrevLayerGroupDSize); end else begin @@ -2561,10 +3529,10 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); begin LocalPrevError.MulAdd ( - LocalPrevError.GetRawPtr(PrevX + LocalCntX, PrevY + LocalCntY, GroupDStart), //PrevPtrA + LocalPrevError.GetRawPtr(PrevX + LocalCntX, PrevY + LocalCntY, PrevLayerGroupDStart), //PrevPtrA LocalWeight.GetRawPtr(LocalCntX, LocalCntY), //PrevPtrB SmoothLocalOutputErrorDeriv, - GroupDSize + OutputGroupDSize ); end; end; @@ -2588,7 +3556,8 @@ procedure TNNetGroupedConvolutionLinear.BackpropagateCPU(); procedure TNNetGroupedConvolutionLinear.SetPrevLayer(pPrevLayer: TNNetLayer); var - GroupDSize: integer; + PrevLayerGroupDSize: integer; + OutputGroupDSize: integer; OutputD: integer; GroupId, GroupDStart: integer; LocalPrevError: TNNetVolume; @@ -2596,18 +3565,34 @@ procedure 
TNNetGroupedConvolutionLinear.SetPrevLayer(pPrevLayer: TNNetLayer); inherited SetPrevLayer(pPrevLayer); FVectorSize := FFeatureSizeX*FFeatureSizeY*(pPrevLayer.Output.Depth div FStruct[5]); FVectorSizeBytes := FVectorSize * SizeOf(TNeuralFloat); - GroupDSize := pPrevLayer.Output.Depth div FStruct[5]; - SetNumWeightsForAllNeurons(FFeatureSizeX, FFeatureSizeY, GroupDSize); + PrevLayerGroupDSize := pPrevLayer.Output.Depth div FStruct[5]; + OutputGroupDSize := FOutput.Depth div FStruct[5]; + SetNumWeightsForAllNeurons(FFeatureSizeX, FFeatureSizeY, PrevLayerGroupDSize); InitDefault(); AfterWeightUpdate(); - SetLength(FArrGroupId, pPrevLayer.Output.Depth); - SetLength(FArrGroupIdStart, pPrevLayer.Output.Depth); + SetLength(FArrPrevLayerGroupId, pPrevLayer.Output.Depth); + SetLength(FArrPrevLayerGroupIdStart, pPrevLayer.Output.Depth); + SetLength(FOutputGroupId, FOutput.Depth); + SetLength(FOutputGroupIdStart, FOutput.Depth); + SetLength(FGroupIdToPrevLayerIdStart, FStruct[5]); + SetLength(FGroupIdToOutputIdStart, FStruct[5]); + for OutputD := 0 to pPrevLayer.Output.Depth - 1 do begin - GroupId := OutputD div GroupDSize; - GroupDStart := GroupId * GroupDSize; - FArrGroupId[OutputD] := GroupId; - FArrGroupIdStart[OutputD] := GroupDStart; + GroupId := OutputD div PrevLayerGroupDSize; + GroupDStart := GroupId * PrevLayerGroupDSize; + FGroupIdToPrevLayerIdStart[GroupId] := GroupDStart; + FArrPrevLayerGroupId[OutputD] := GroupId; + FArrPrevLayerGroupIdStart[OutputD] := GroupDStart; + end; + + for OutputD := 0 to FOutput.Depth - 1 do + begin + GroupId := OutputD div OutputGroupDSize; + GroupDStart := GroupId * OutputGroupDSize; + FGroupIdToOutputIdStart[GroupId] := GroupDStart; + FOutputGroupId[OutputD] := GroupId; + FOutputGroupIdStart[OutputD] := GroupDStart; end; if FPadding > 0 then @@ -2635,8 +3620,12 @@ constructor TNNetGroupedConvolutionLinear.Create(pNumFeatures, pFeatureSize, destructor TNNetGroupedConvolutionLinear.Destroy(); begin - SetLength(FArrGroupId, 0); - 
SetLength(FArrGroupIdStart, 0); + SetLength(FArrPrevLayerGroupId, 0); + SetLength(FArrPrevLayerGroupIdStart, 0); + SetLength(FOutputGroupId, 0); + SetLength(FOutputGroupIdStart, 0); + SetLength(FGroupIdToPrevLayerIdStart, 0); + SetLength(FGroupIdToOutputIdStart, 0); inherited Destroy(); end; @@ -2731,7 +3720,7 @@ constructor TNNetNegate.Create(); procedure TNNetMulByConstant.Compute(); begin inherited Compute(); - FOutput.Mul(FStruct[0]); + FOutput.Mul(FFloatSt[0]); end; procedure TNNetCellMulByCell.SetPrevLayer(pPrevLayer: TNNetLayer); @@ -3050,6 +4039,12 @@ procedure TNNetPad.Compute(); FForwardTime := FForwardTime + (Now() - StartTime); end; +constructor TNNetPad.Create; +begin + raise Exception.Create('Default constructor not allowed here'); + inherited; +end; + procedure TNNetPad.Backpropagate(); var StartTime: double; @@ -4027,7 +5022,9 @@ procedure TNNetDepthwiseConv.BackpropagateCPUFast(); {$ENDIF} if MaxNeuronIdx = 0 then begin + {$IFDEF Debug} NeuronIdx := 0; + {$ENDIF} LocalDelta := FArrNeurons[0].Delta; LocalWeight := FArrNeurons[0].Weights; for OutputY := 0 to MaxY do @@ -5228,9 +6225,9 @@ procedure TNNetLayerConcatedWeights.EnableOpenCL( RefreshNeuronWeightList(); AfterWeightUpdate(); - FConcatedWeights.ReSize(FNeuronWeightList.GetTotalSize(),1,1); + FConcatedWeights.ReSize(FNeuronWeightList.Count, 1, FNeuronWeightList[0].Size); - FConcatedWInter.ReSize(FNeuronWeightList.GetTotalSize(),1,1); + FConcatedWInter.ReSize(FNeuronWeightList[0].Size, 1, FNeuronWeightList.Count); //WriteLn(' Layer:', Self.LayerIdx,' Vector:',FVectorSize,' Neuron count:',FNeuronWeightList.Count,' Output size:',FOutput.Size); FShouldInterleaveWeights := true; @@ -5304,10 +6301,14 @@ procedure TNNetReLUBase.Backpropagate(); end; { TNNetMulLearning } -constructor TNNetMulLearning.Create(pMul: integer); +constructor TNNetMulLearning.Create(pMul: TNeuralFloat); begin inherited Create(); - FStruct[0] := pMul; + FFloatSt[0] := pMul; + if pMul = 0 then + begin + 
FErrorProc('TNNetMulLearning or TNNetMulByConstant can not be zero.'); + end; end; procedure TNNetMulLearning.Backpropagate(); @@ -5317,7 +6318,7 @@ procedure TNNetMulLearning.Backpropagate(); StartTime := Now(); Inc(FBackPropCallCurrentCnt); if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; - FOutputError.Mul(FStruct[0]); + FOutputError.Mul(FFloatSt[0]); FBackwardTime := FBackwardTime + (Now() - StartTime); inherited Backpropagate(); end; @@ -5329,9 +6330,6 @@ constructor TNNetSum.Create(aL: array of TNNetLayer); SizeX, SizeY, Deep: integer; begin inherited Create(); - SizeX := aL[0].FOutput.SizeX; - SizeY := aL[0].FOutput.SizeY; - Deep := aL[0].FOutput.Depth; if Length(aL) < 1 then begin @@ -5339,6 +6337,10 @@ constructor TNNetSum.Create(aL: array of TNNetLayer); end else begin + SizeX := aL[0].FOutput.SizeX; + SizeY := aL[0].FOutput.SizeY; + Deep := aL[0].FOutput.Depth; + for LayerCnt := Low(aL) to High(aL) do begin if @@ -5929,6 +6931,175 @@ function THistoricalNets.AddSuperResolution(pSizeX, pSizeY, BottleNeck, pNeurons Result := AddLayer( TNNetConvolutionLinear.Create(3,1,0,0) ); end; +// Ported code from: +// https://github.com/tgautam03/Transformers/blob/master/classification.ipynb +procedure TNNet.AddSingleHeadSelfAttention( + out Attended, W: TNNetLayer); +var + x, Query, Key, ValueT: TNNetLayer; // WT, YT, Value + EmbeddingDim: integer; +begin + x := GetLastLayer(); + EmbeddingDim := x.Output.Depth; + Query := AddLayerAfter( TNNetPointwiseConvLinear.Create(EmbeddingDim), x); + Key := AddLayerAfter( TNNetPointwiseConvLinear.Create(EmbeddingDim), x); + (*Value:=*)AddLayerAfter( TNNetPointwiseConvLinear.Create(EmbeddingDim), x); + ValueT := AddLayer( TNNetTransposeXD.Create() ); + (*WT := *)AddLayer( TNNetDotProducts.Create(Query, Key) ); + (*WT := *)AddLayer( TNNetMulByConstant.Create(1/Sqrt(EmbeddingDim)) ); + (*WT := *)AddLayer( TNNetReLUL.Create(-500,+500,0) ); + (*W := *) AddLayer( TNNetTransposeXD.Create() ); + W := AddLayer( 
TNNetPointwiseSoftMax.Create() ); + (*YT := *)AddLayer( TNNetDotProducts.Create(ValueT, W) ); + Attended := AddLayer( TNNetPointwiseConvLinear.Create(EmbeddingDim) ); +end; + +function TNNet.AddSelfAttention(Heads: integer): TNNetLayer; +var + W : TNNetLayer; + // Query, Key, ValueT: TNNetLayer; // WT, YT, Value + PreviousLayer: TNNetLayer; + InputChannelsPerGroup: integer; + EachGroupOutput: array of TNNetLayer; + HeadCnt: integer; + QueryGroup, KeyGroup, {ValueGroup, }ValueTGroup: TNNetLayer; +begin + if Heads <= 1 then + begin + AddSingleHeadSelfAttention(Result, W); + end + else + begin + PreviousLayer := GetLastLayer(); + SetLength(EachGroupOutput, Heads); + InputChannelsPerGroup := PreviousLayer.FOutput.Depth div Heads; + for HeadCnt := 0 to Heads - 1 do + begin + QueryGroup := AddLayerAfter( TNNetPointwiseConvLinear.Create(InputChannelsPerGroup), PreviousLayer); + KeyGroup := AddLayerAfter( TNNetPointwiseConvLinear.Create(InputChannelsPerGroup), PreviousLayer); + {ValueGroup := }AddLayerAfter( TNNetPointwiseConvLinear.Create(InputChannelsPerGroup), PreviousLayer); + ValueTGroup := AddLayer( TNNetTransposeXD.Create() ); + (*W := *)AddLayer( TNNetDotProducts.Create(QueryGroup, KeyGroup) ); + (*W := *)AddLayer( TNNetReLUL.Create(-100,+100,0) ); + (*W := *)AddLayer( TNNetMulByConstant.Create(1/Sqrt(InputChannelsPerGroup)) ); + W := AddLayer( TNNetPointwiseSoftMax.Create() ); + (*YT := *)AddLayer( TNNetDotProducts.Create(ValueTGroup, W) ); + EachGroupOutput[HeadCnt] := GetLastLayer(); + end; + AddLayer( TNNetDeepConcat.Create(EachGroupOutput) ); + SetLength(EachGroupOutput, 0); + Result := AddLayer( TNNetPointwiseConvLinear.Create(PreviousLayer.FOutput.Depth) ); + end; +end; + +function TNNet.AddSelfAttentionCAI(Heads: integer): TNNetLayer; +var + W: TNNetLayer; + PreviousLayer: TNNetLayer; + InputChannelsPerGroup: integer; + EachGroupOutput: array of TNNetLayer; + HeadCnt: integer; + QueryGroup, KeyGroup, ValueGroup, ValueTGroup: TNNetLayer; +begin + if Heads <= 
1 then + begin + AddSingleHeadSelfAttention(Result, W); + end + else + begin + PreviousLayer := GetLastLayer(); + SetLength(EachGroupOutput, Heads); + InputChannelsPerGroup := PreviousLayer.FOutput.Depth div Heads; + for HeadCnt := 0 to Heads - 1 do + begin + QueryGroup := AddLayerAfter( TNNetPointwiseConvLinear.Create(InputChannelsPerGroup), PreviousLayer); + KeyGroup := AddLayerAfter( TNNetPointwiseConvLinear.Create(InputChannelsPerGroup), PreviousLayer); + ValueGroup := AddLayerAfter( TNNetPointwiseConvLinear.Create(InputChannelsPerGroup), PreviousLayer); + ValueTGroup := AddLayerAfter( TNNetTransposeXD.Create(), ValueGroup); + (*W := *)AddLayer( TNNetDotProducts.Create(QueryGroup, KeyGroup) ); + (*W := *)AddLayer( TNNetLayerMaxNormalization.Create() ); + W := AddLayer( TNNetPointwiseSoftMax.Create() ); + (*YT := *)AddLayer( TNNetDotProducts.Create(ValueTGroup, W) ); + EachGroupOutput[HeadCnt] := GetLastLayer(); + end; + AddLayer( TNNetDeepConcat.Create(EachGroupOutput) ); + SetLength(EachGroupOutput, 0); + // Groups with few channels tend to be numerically unstable + if InputChannelsPerGroup < 64 then + begin + AddLayer( TNNetMulByConstant.Create(InputChannelsPerGroup/64) ); + end; + Result := AddLayer( TNNetPointwiseConvLinear.Create(PreviousLayer.FOutput.Depth) ); + end; +end; + +// Ported code from: +// https://github.com/tgautam03/Transformers/blob/master/classification.ipynb +procedure TNNet.AddSingleHeadTransformerBlock( + out Result, W: TNNetLayer; + HasNorm: boolean = False); +var + PrevLayer, AttendedPlusPrev, Attended: TNNetLayer; + EmbeddingDim: integer; +begin + PrevLayer := GetLastLayer(); + EmbeddingDim := PrevLayer.Output.Depth; + AddSingleHeadSelfAttention(Attended, W); + AddLayer( TNNetSum.Create([Attended, PrevLayer]) ); + if HasNorm + then AttendedPlusPrev := AddLayer( TNNetMovingScale.Create() ) + else AttendedPlusPrev := GetLastLayer(); + AddLayer( TNNetPointwiseConvReLU.Create(EmbeddingDim*4) ); + AddLayer( 
TNNetPointwiseConvLinear.Create(EmbeddingDim) ); + if HasNorm then Result := AddLayer( TNNetMovingScale.Create() ); + AddLayer( TNNetSum.Create([ GetLastLayer(), AttendedPlusPrev]) ); + if HasNorm + then Result := AddLayer( TNNetMovingScale.Create() ) + else Result := GetLastLayer(); +end; + +function TNNet.AddTransformerBlock(Heads: integer; + IntermediateDim: integer; HasNorm: boolean = False + ): TNNetLayer; +var + PrevLayer, AttendedPlusPrev, Attended: TNNetLayer; + EmbeddingDim: integer; +begin + PrevLayer := GetLastLayer(); + EmbeddingDim := PrevLayer.Output.Depth; + Attended := AddSelfAttention(Heads); + AddLayer( TNNetSum.Create([Attended, PrevLayer]) ); + if HasNorm + then AttendedPlusPrev := AddLayer( TNNetMovingScale.Create() ) + else AttendedPlusPrev := GetLastLayer(); + AddLayer( TNNetPointwiseConvReLU.Create(IntermediateDim) ); + AddLayer( TNNetPointwiseConvLinear.Create(EmbeddingDim) ); + if HasNorm then AddLayer( TNNetMovingScale.Create() ); + AddLayer( TNNetSum.Create([ GetLastLayer(), AttendedPlusPrev]) ); + if HasNorm + then Result := AddLayer( TNNetMovingScale.Create() ) + else Result := GetLastLayer(); +end; + +function TNNet.AddTransformerBlockCAI(Heads: integer; + IntermediateDim: integer; + HasNorm: boolean = False + ): TNNetLayer; +var + PrevLayer, AttendedPlusPrev, Attended: TNNetLayer; + EmbeddingDim: integer; +begin + PrevLayer := GetLastLayer(); + EmbeddingDim := PrevLayer.Output.Depth; + Attended := AddSelfAttentionCAI(Heads); + AttendedPlusPrev := AddLayer( TNNetSum.Create([Attended, PrevLayer]) ); + AddLayer( TNNetPointwiseConvReLU.Create(IntermediateDim) ); + if HasNorm then AddLayer( TNNetMovingStdNormalization.create() ); + AddLayer( TNNetPointwiseConvLinear.Create(EmbeddingDim) ); + AddLayer( TNNetSum.Create([ GetLastLayer(), AttendedPlusPrev]) ); + Result := GetLastLayer(); +end; + { TNNetFullConnectLinear } procedure TNNetFullConnectLinear.ComputePreviousLayerErrorCPU(); @@ -6444,13 +7615,23 @@ destructor 
TNNetConcatBase.Destroy(); function TNNetConcatBase.SaveStructureToString(): string; var I: integer; + LayersStr: string; +//begin +// Result := inherited SaveStructureToString + ':'; +// for I := 0 to FPrevLayerList.Count - 1 do +// begin +// if I > 0 then Result := Result + ';'; +// Result := Result + IntToStr(FPrevLayerList[I].FLayerIdx); +// end; +//end; begin - Result := inherited SaveStructureToString + ':'; + LayersStr := ''; for I := 0 to FPrevLayerList.Count - 1 do begin - if I > 0 then Result := Result + ';'; - Result := Result + IntToStr(FPrevLayerList[I].FLayerIdx); + if I > 0 then LayersStr := LayersStr + ';'; + LayersStr := LayersStr + IntToStr(FPrevLayerList[I].FLayerIdx); end; + Result := StringReplace(inherited SaveStructureToString,'::',':'+LayersStr+':',[rfReplaceAll]); end; procedure TNNetConcatBase.BackpropagateConcat(); @@ -6574,14 +7755,25 @@ procedure TNNetSplitChannels.Backpropagate(); function TNNetSplitChannels.SaveStructureToString(): string; var I, MaxChannels: integer; + LayersStr: string; +//begin +// Result := inherited SaveStructureToString + ':'; +// MaxChannels := Length(FChannels) - 1; +// for I := 0 to MaxChannels do +// begin +// if I > 0 then Result := Result + ';'; +// Result := Result + IntToStr(FChannels[I]); +// end; +//end; begin - Result := inherited SaveStructureToString + ':'; + LayersStr := ''; MaxChannels := Length(FChannels) - 1; for I := 0 to MaxChannels do begin - if I > 0 then Result := Result + ';'; - Result := Result + IntToStr(FChannels[I]); + if I > 0 then LayersStr := LayersStr + ';'; + LayersStr := LayersStr + IntToStr(FChannels[I]); end; + Result := StringReplace(inherited SaveStructureToString,'::',':'+LayersStr+':',[rfReplaceAll]); end; procedure TestDataParallelism(NN: TNNet); @@ -7114,14 +8306,15 @@ constructor TNNetDataParallelism.Create(CloneNN: TNNet; pSize: integer; pFreeObj NN: TNNet; begin inherited Create(pFreeObjects); - NNData := CloneNN.SaveToString(); - + NNData := 
CloneNN.SaveStructureToString(); for I := 1 to pSize do begin NN := TNNet.Create; - NN.LoadFromString(NNData); + NN.LoadStructureFromString(NNData); + NN.CopyWeights(CloneNN); Self.Add(NN); end; + NNData := ''; end; constructor TNNetDataParallelism.Create(pSize: integer; pFreeObjects: Boolean); @@ -7299,7 +8492,7 @@ procedure TNNetDataParallelism.DisableOpenCL(); end; procedure TNNetDataParallelism.EnableOpenCL(platform_id: cl_platform_id; - device_id: cl_device_id); + device_id: cl_device_id; maxNumOpenCLThreads : integer = -1); var I: integer; begin @@ -7863,8 +9056,12 @@ procedure TNNetIdentity.SetPrevLayer(pPrevLayer: TNNetLayer); end; procedure TNNetIdentity.Compute; +var + StartTime: double; begin + StartTime := Now(); FOutput.CopyNoChecks(FPrevLayer.FOutput); + FForwardTime := FForwardTime + (Now() - StartTime); end; procedure TNNetIdentity.Backpropagate; @@ -8196,10 +9393,14 @@ constructor TNNetFullConnectReLU.Create(pSize: integer; pSuppressBias: integer = end; { TNNetSoftMax } -procedure TNNetSoftMax.Compute(); +procedure TNNetSoftMax.Compute; +var + StartTime: double; begin - inherited Compute(); + StartTime := Now(); + FOutput.CopyNoChecks(FPrevLayer.FOutput); FSoftTotalSum := FOutput.SoftMax(); + FForwardTime := FForwardTime + (Now() - StartTime); end; { TNNetConvolutionReLU } @@ -8264,6 +9465,12 @@ function TNNetPoolBase.CalcOutputSize(pInputSize: integer): integer; constructor TNNetPoolBase.Create(pPoolSize: integer; pStride:integer = 0; pPadding: integer = 0); begin inherited Create; + + {$IFDEF THREADSAVERANDOM} + fRandEng := TRandomGenerator.Create(raChaCha); + fRandEng.Init(0); + {$ENDIF} + FPoolSize := pPoolSize; SetLength(FMaxPosX, 0); SetLength(FMaxPosY, 0); @@ -8283,6 +9490,9 @@ constructor TNNetPoolBase.Create(pPoolSize: integer; pStride:integer = 0; pPaddi destructor TNNetPoolBase.Destroy(); begin + {$IFDEF THREADSAVERANDOM} + fRandEng.Free; + {$ENDIF} SetLength(FMaxPosX, 0); SetLength(FMaxPosY, 0); SetLength(FInputDivPool, 0); @@ -8361,7 
+9571,7 @@ procedure TNNetMaxPool.ComputeWithStride(); begin OutputMaxX := Output.SizeX - 1; OutputMaxY := Output.SizeY - 1; - MaxD := Output.Depth - 1; + MaxD := FPrevLayer.Output.Depth - 1; LocalPoolSizeM1 := FPoolSize - 1; InputSizeXM1 := FInputCopy.SizeX - 1; InputSizeYM1 := FInputCopy.SizeY - 1; @@ -8397,6 +9607,13 @@ procedure TNNetMaxPool.ComputeWithStride(); end; // of for CntD end; +{$IFDEF THREADSAVERANDOM} +function TNNetPoolBase.Random(range : integer) : integer; +begin + Result := fRandEng.RandInt(range); +end; +{$ENDIF} + procedure TNNetPoolBase.Backpropagate(); var StartTime: double; @@ -8430,7 +9647,7 @@ procedure TNNetPoolBase.BackpropagateDefaultStride(); begin MaxX := Output.SizeX - 1; MaxY := Output.SizeY - 1; - MaxD := Output.Depth - 1; + MaxD := FPrevLayer.Output.Depth - 1; //Although the below line makes all the sense, it might brake compatibility //with existing code. //if FStride > 1 then FOutputError.Mul( Min(FStride, 4) ); @@ -8464,7 +9681,7 @@ procedure TNNetPoolBase.BackpropagateWithStride(); begin MaxX := Output.SizeX - 1; MaxY := Output.SizeY - 1; - MaxD := Output.Depth - 1; + MaxD := FPrevLayer.Output.Depth - 1; //Although the below line makes all the sense, it might brake compatibility //with existing code. 
//if FStride > 1 then FOutputError.Mul( Min(FStride, 4) ); @@ -8524,7 +9741,7 @@ procedure TNNetConvolutionBase.SetPrevLayer(pPrevLayer: TNNetLayer); FInputPrepared.Resize(FOutputSizeX, FOutputSizeY, FVectorSize); end; RefreshNeuronWeightList(); - if ShouldUseInterleavedDotProduct then + if ShouldUseInterleavedDotProduct (*or FPointwise*) then begin FShouldConcatWeights := true; FShouldInterleaveWeights := true; @@ -8927,7 +10144,7 @@ procedure TNNetConvolution.Backpropagate(); //BackpropagateFastCPUDev(); //BackpropagateFastCPU(); - BackpropagateFastTiledCPU(); + BackpropagateFastTiledCPU(); // This is our default backprop //BackpropagateCPU(); {$IFDEF CheckRange}ForceRangeWeights(1000);{$ENDIF} @@ -9378,6 +10595,14 @@ procedure TNNetConvolution.BackpropagateFastTiledCPU(); end; end; + (* + if (FPointwise and FCalculatePrevLayerError) then + begin + FPrevLayerErrorPadded.DotProductsPointwise(FConcatedWInter, FOutputErrorDeriv); + LocalPrevError.Add(FPrevLayerErrorPadded); + end; + *) + if FPadding > 0 then begin FPrevLayer.OutputError.AddArea(0, 0, FPadding, FPadding, FPrevLayer.OutputError.SizeX, FPrevLayer.OutputError.SizeY, FPrevLayerErrorPadded); @@ -9402,47 +10627,56 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); LocalOutputErrorDeriv: TNeuralFloat; SmoothLocalOutputErrorDeriv: TNeuralFloat; LocalWeight, LocalPrevError: TNNetVolume; - {SrcPtr,} LocalDestPtr: TNeuralFloatArrPtr; - SmoothLocalOutputErrorDerivPtr: pointer; - PrevNumElements, PrevMissedElements: integer; - {$IFDEF AVX64}PtrNeuronDelta : TNeuralFloatArrPtr; {$ENDIF} + //{SrcPtr,} LocalDestPtr: TNeuralFloatArrPtr; + //SmoothLocalOutputErrorDerivPtr: pointer; + {$IFDEF Debug} {$IFDEF AVX64} PrevNumElements, PrevMissedElements: integer; {$ENDIF} {$ENDIF} + {$IFDEF Release}{$IFDEF AVX64} PtrNeuronDelta : TNeuralFloatArrPtr; {$ENDIF} {$ENDIF} + + PtrPreparedInput: TNeuralFloatArrPtr; PrevPtrA, PrevPtrB: TNeuralFloatArrPtr; - NeuronWeights: integer; - LocalLearningErrorDerivPtr: pointer; - 
localNumElements, MissedElements: integer; + + //LocalLearningErrorDerivPtr: pointer; + {$IFDEF Debug} {$IFDEF AVX64}NeuronWeights, localNumElements, MissedElements: integer; {$ENDIF} {$ENDIF} MaxPrevX, MaxPrevY: integer; - InterErrorDeriv, InterInput: TNNetVolume; - NeuronCnt, NeuronPosCnt: integer; - LocalDelta: TNNetVolume; begin - InterErrorDeriv := TNNetVolume.Create(); - InterInput := TNNetVolume.Create(); MaxX := OutputError.SizeX - 1; MaxY := OutputError.SizeY - 1; MaxD := OutputError.Depth - 1; - LocalDestPtr := nil; - MaxPrevX := 1 + FPrevLayer.FOutputError.SizeX - FFeatureSizeX; - MaxPrevY := 1 + FPrevLayer.FOutputError.SizeY - FFeatureSizeY; - LocalPrevError := FPrevLayer.OutputError; + //LocalDestPtr := nil; + if FPadding > 0 then + begin + FPrevLayerErrorPadded.Fill(0); + LocalPrevError := FPrevLayerErrorPadded; + end + else + begin + LocalPrevError := FPrevLayer.OutputError; + end; + MaxPrevX := 1 + LocalPrevError.SizeX - FFeatureSizeX; + MaxPrevY := 1 + LocalPrevError.SizeY - FFeatureSizeY; + {$IFDEF Debug} {$IFDEF AVX64} PrevNumElements := (FSizeXDepth div 4) * 4; PrevMissedElements := FSizeXDepth - PrevNumElements; NeuronWeights := FArrNeurons[0].Delta.Size; localNumElements := (NeuronWeights div 4) * 4; MissedElements := NeuronWeights - localNumElements; - SmoothLocalOutputErrorDerivPtr := Addr(SmoothLocalOutputErrorDeriv); - LocalLearningErrorDerivPtr := Addr(LocalLearningErrorDeriv); + {$ENDIF} {$ENDIF} + //SmoothLocalOutputErrorDerivPtr := Addr(SmoothLocalOutputErrorDeriv); + //LocalLearningErrorDerivPtr := Addr(LocalLearningErrorDeriv); begin for OutputY := 0 to MaxY do begin - PrevY := (OutputY*FStride)-FPadding; + PrevY := (OutputY*FStride); for OutputX := 0 to MaxX do begin - PrevX := (OutputX*FStride)-FPadding; + PrevX := (OutputX*FStride); OutputRawPos := FOutputErrorDeriv.GetRawPos(OutputX, OutputY); - if (FCalculatePrevLayerError) then LocalDestPtr := LocalPrevError.GetRawPtr(OutputX, OutputY); - //PtrPreparedInput := 
FInputPrepared.GetRawPtr(OutputX, OutputY); + //TODO: the next line is probably wrong. + // this is actually never used afterwards -> I comment it out + //if (FCalculatePrevLayerError) then LocalDestPtr := LocalPrevError.GetRawPtr(OutputX, OutputY); + PtrPreparedInput := FInputPrepared.GetRawPtr(OutputX, OutputY); CanBackpropOnPos := - (PrevX >= 0) and (PrevY >= 0) and + //(PrevX >= 0) and (PrevY >= 0) and (PrevX < MaxPrevX) and (PrevY < MaxPrevY); for OutputD := 0 to MaxD do @@ -9450,7 +10684,7 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); {$IFDEF FPC} if FActivationFn = @RectifiedLinearUnit then begin - if FOutput.FData[OutputRawPos] > 0 then + if FOutputRaw.FData[OutputRawPos] >= 0 then begin LocalOutputErrorDeriv := FOutputError.FData[OutputRawPos]; end @@ -9479,7 +10713,17 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); LocalLearningErrorDeriv := (-FLearningRate) * LocalOutputErrorDeriv; if (LocalLearningErrorDeriv <> 0.0) then begin - //FArrNeurons[OutputD].Delta.MulAdd(LocalLearningErrorDeriv, PtrPreparedInput); + {$IFNDEF AVX64} + FArrNeurons[OutputD].Delta.MulAdd(LocalLearningErrorDeriv, PtrPreparedInput); + {$ELSE} + {$IFDEF Debug} + if localNumElements + MissedElements <> FArrNeurons[OutputD].Delta.Size + then FErrorProc('Error at TNNetConvolution.BackpropagateFastCPU(): neuron size doesn''t match.'); + {$ENDIF} + PtrNeuronDelta := FArrNeurons[OutputD].Delta.DataPtr; + asm_avx64_train_neuron + {$ENDIF} + {$IFDEF FPC} FArrNeurons[OutputD].FBiasDelta += LocalLearningErrorDeriv; {$ELSE} @@ -9487,10 +10731,10 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); FArrNeurons[OutputD].FBiasDelta + LocalLearningErrorDeriv; {$ENDIF} - if (FCalculatePrevLayerError) then + if (FCalculatePrevLayerError and not(FPointwise)) then begin LocalWeight := FArrNeurons[OutputD].Weights; - if FPointwise then + (*if FPointwise then begin {$IFNDEF AVX64} LocalPrevError.MulAdd(LocalDestPtr, LocalWeight.DataPtr, LocalOutputErrorDeriv, FInputCopy.Depth); @@ 
-9505,7 +10749,7 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); asm_avx64_prev_backprop; {$ENDIF} end - else + else *) begin if CanBackpropOnPos then begin @@ -9552,6 +10796,13 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); end; end; + if (FPointwise and FCalculatePrevLayerError) then + begin + FPrevLayerErrorPadded.DotProductsPointwise(FConcatedWInter, FOutputErrorDeriv); + LocalPrevError.Add(FPrevLayerErrorPadded); + end; + + (* FOutputErrorDeriv.Mul(-FLearningRate); InterErrorDeriv.InterleaveWithDepthFrom(FOutputErrorDeriv, FOutputErrorDeriv.SizeX * FOutputErrorDeriv.SizeY); InterInput.InterleaveWithDepthFrom(FInputPrepared, FInputPrepared.SizeX * FInputPrepared.SizeY); @@ -9573,15 +10824,18 @@ procedure TNNetConvolution.BackpropagateFastCPUDev(); ); end; end; + *) + + if FPadding > 0 then + begin + FPrevLayer.OutputError.AddArea(0, 0, FPadding, FPadding, FPrevLayer.OutputError.SizeX, FPrevLayer.OutputError.SizeY, FPrevLayerErrorPadded); + end; if (not FBatchUpdate) then begin for OutputD := 0 to MaxD do FArrNeurons[OutputD].UpdateWeights(FInertia); AfterWeightUpdate(); end; - - InterErrorDeriv.Free; - InterInput.Free; end; constructor TNNetConvolutionAbstract.Create(pFeatureSize, pInputPadding, pStride: integer; pSuppressBias: integer = 0); @@ -9595,8 +10849,8 @@ constructor TNNetConvolutionAbstract.Create(pFeatureSize, pInputPadding, pStride if FPadding > 0 then begin FInputCopy := TNNetVolume.Create; - FPrevLayerErrorPadded := TNNetVolume.Create; end; + FPrevLayerErrorPadded := TNNetVolume.Create; end; destructor TNNetConvolutionAbstract.Destroy(); @@ -9604,8 +10858,8 @@ destructor TNNetConvolutionAbstract.Destroy(); if FPadding > 0 then begin FInputCopy.Free; - FPrevLayerErrorPadded.Free; end; + FPrevLayerErrorPadded.Free; inherited Destroy(); end; @@ -10080,6 +11334,245 @@ function TNNetInput.DisableErrorCollection: TNNetInput; Result := Self; end; +{ TNNetEmbedding } + +procedure TNNetEmbedding.SetPrevLayer(pPrevLayer: TNNetLayer); 
+begin + inherited SetPrevLayer(pPrevLayer); + FOutput.ReSize(pPrevLayer.Output.Size, 1, FEmbeddingSize); + FOutputError.ReSize(FOutput); + SetLength(FInputTokens, pPrevLayer.Output.Size); +end; + +constructor TNNetEmbedding.Create(pVocabSize, pEmbeddingSize: integer; + EncodeZero: integer = 0; ScaleEmbedding: TNeuralFloat = 2); +begin + inherited Create(); + FVocabSize := pVocabSize; + FEmbeddingSize := pEmbeddingSize; + FEncodeZero := (EncodeZero>0); + FScaleEmbedding := ScaleEmbedding; + FStruct[0] := pVocabSize; + FStruct[1] := pEmbeddingSize; + FStruct[2] := EncodeZero; + FFloatSt[0] := ScaleEmbedding; + if FNeurons.Count < 1 then AddMissingNeurons(1); + SetNumWeightsForAllNeurons(pVocabSize, 1, pEmbeddingSize); + InitDefault(); + AfterWeightUpdate(); +end; + +destructor TNNetEmbedding.Destroy; +begin + SetLength(FInputTokens, 0); + inherited Destroy; +end; + +procedure TNNetEmbedding.InitDefault; +begin + InitUniform(FScaleEmbedding); +end; + +procedure TNNetEmbedding.Compute(); +var + MaxToken, CntToken, CurrentToken: integer; + SourcePtr, DestPtr: TNeuralFloatArrPtr; + LocalWeights: TNNetVolume; + StartTime: double; +begin + StartTime := Now(); + MaxToken := FPrevLayer.Output.Size - 1; + LocalWeights := FNeurons[0].Weights; + FOutput.Fill(0); + {$IFDEF Debug} + if FEmbeddingSize=0 then + begin + FErrorProc('Embedding size can not be zero.'); + end; + if LocalWeights.Size <> FEmbeddingSize * FVocabSize then + begin + FErrorProc('Weights size do not match at TNNetEmbedding:' + + IntToStr(LocalWeights.Size)+' EmbeddingSize * Vocab Size:'+ + IntToStr(FEmbeddingSize * FVocabSize)+' '+ + ' EmbeddingSize: '+IntToStr(FEmbeddingSize)+ + ' Vocab Size: '+IntToStr(FVocabSize) + ); + end; + {$ENDIF} + for CntToken := 0 to MaxToken do + begin + CurrentToken := Round(FPrevLayer.Output.FData[CntToken]); + if CurrentToken >= FVocabSize then + begin + FErrorProc('Token is bigger than vocab size:'+ IntToStr(CurrentToken)); + CurrentToken := 0; + end; + if FEncodeZero or 
(CurrentToken>0) then + begin + FInputTokens[CntToken] := CurrentToken; + SourcePtr := LocalWeights.GetRawPtr(CurrentToken, 0, 0); + DestPtr := FOutput.GetRawPtr(CntToken, 0, 0); + // TODO: replace this call by a copy function. + TNNetVolume.MulAdd(DestPtr, SourcePtr, 1, FEmbeddingSize); + end; + end; + FForwardTime := FForwardTime + (Now() - StartTime); +end; + +constructor TNNetEmbedding.Create; +begin + inherited; + + FVocabSize := 1; + FEmbeddingSize := 1; + FEncodeZero := False; + FScaleEmbedding := 2; + FStruct[0] := FVocabSize; + FStruct[1] := FEmbeddingSize; + FStruct[2] := 0; + FFloatSt[0] := 2; + if FNeurons.Count < 1 then AddMissingNeurons(1); + SetNumWeightsForAllNeurons(FVocabSize, 1, FEmbeddingSize); + InitDefault(); + AfterWeightUpdate(); +end; + +procedure TNNetEmbedding.Backpropagate(); +var + MaxToken, CntToken, CurrentToken: integer; + SourcePtr, DestPtr: TNeuralFloatArrPtr; + LocalWeights, LocalDelta: TNNetVolume; + StartTime: double; +begin + LocalWeights := FNeurons[0].Weights; + LocalDelta := FNeurons[0].Delta; + {$IFDEF Debug} + if FEmbeddingSize=0 then + begin + FErrorProc('Embedding size can not be zero.'); + end; + if FBackPropCallCurrentCnt > FDepartingBranchesCnt then + begin + FErrorProc('Backprop call count does not look right at TNNetEmbedding: '+IntToStr(FBackPropCallCurrentCnt)+' '+IntToStr(FDepartingBranchesCnt)); + end; + if LocalWeights.Size <> FEmbeddingSize * FVocabSize then + begin + FErrorProc('Weights size do not match at TNNetEmbedding:' + + IntToStr(LocalWeights.Size)+' EmbeddingSize * Vocab Size:'+ + IntToStr(FEmbeddingSize * FVocabSize)+' '+ + ' EmbeddingSize: '+IntToStr(FEmbeddingSize)+ + ' Vocab Size: '+IntToStr(FVocabSize) + ); + end; + if LocalDelta.Size <> LocalWeights.Size then + begin + FErrorProc('Weights size and Delta Size do not match at TNNetEmbedding.' 
+ + ' Weights Size: '+IntToStr(LocalWeights.Size)+ + ' Delta Size: '+IntToStr(LocalDelta.Size) + ); + end; + //WriteLn( LocalWeights.GetSum() ); + {$ENDIF} + Inc(FBackPropCallCurrentCnt); + if FBackPropCallCurrentCnt < FDepartingBranchesCnt then exit; + StartTime := Now(); + MaxToken := FPrevLayer.Output.Size - 1; + for CntToken := 0 to MaxToken do + begin + CurrentToken := FInputTokens[CntToken]; + if FEncodeZero or (CurrentToken>0) then + begin + SourcePtr := FOutputError.GetRawPtr(CntToken); + if FBatchUpdate + then DestPtr := LocalDelta.GetRawPtr(CurrentToken, 0, 0) + else DestPtr := LocalWeights.GetRawPtr(CurrentToken, 0, 0); + TNNetVolume.MulAdd(DestPtr, SourcePtr, FLearningRate, FEmbeddingSize); + end; + end; + FBackwardTime := FBackwardTime + (Now() - StartTime); +end; + +{ TNNetTokenAndPositionalEmbedding } + +procedure TNNetTokenAndPositionalEmbedding.SetPrevLayer(pPrevLayer: TNNetLayer); +begin + inherited SetPrevLayer(pPrevLayer); + FPositionalEmbedding.ReSize(FOutput); + FPositionalEmbedding.PositionalEncoding(FEmbeddingSize); + if FScalePositional<>1 then FPositionalEmbedding.Mul(FScalePositional); +end; + +constructor TNNetTokenAndPositionalEmbedding.Create(pVocabSize, + pEmbeddingSize: integer; + EncodeZero: integer = 0; + ScaleEmbedding: TNeuralFloat = 2; + ScalePositional: TNeuralFloat = 1; + PositionalEmbeddingN: integer = 0); +begin + inherited Create(pVocabSize, pEmbeddingSize, EncodeZero, ScaleEmbedding); + FPositionalEmbedding := TNNetVolume.Create; + if PositionalEmbeddingN=0 + then FPositionalEmbeddingN := 10000 + else FPositionalEmbeddingN := PositionalEmbeddingN; + FScalePositional := ScalePositional; + FStruct[3] := FPositionalEmbeddingN; + FFloatSt[1] := FScalePositional; +end; + +destructor TNNetTokenAndPositionalEmbedding.Destroy; +begin + FPositionalEmbedding.Free; + inherited Destroy; +end; + +procedure TNNetTokenAndPositionalEmbedding.Compute(); +var + MaxToken, CntToken, CurrentToken: integer; + SourcePtr, SourcePtrPos, DestPtr: 
TNeuralFloatArrPtr; + LocalWeights: TNNetVolume; + StartTime: double; +begin + StartTime := Now(); + MaxToken := FPrevLayer.Output.Size - 1; + LocalWeights := FNeurons[0].Weights; + FOutput.Fill(0); + {$IFDEF Debug} + if FEmbeddingSize=0 then + begin + FErrorProc('Embedding size can not be zero.'); + end; + if LocalWeights.Size <> FEmbeddingSize * FVocabSize then + begin + FErrorProc('Weights size do not match at TNNetEmbedding:' + + IntToStr(LocalWeights.Size)+' EmbeddingSize * Vocab Size:'+ + IntToStr(FEmbeddingSize * FVocabSize)+' '+ + ' EmbeddingSize: '+IntToStr(FEmbeddingSize)+ + ' Vocab Size: '+IntToStr(FVocabSize) + ); + end; + {$ENDIF} + for CntToken := 0 to MaxToken do + begin + CurrentToken := Round(FPrevLayer.Output.FData[CntToken]); + if CurrentToken >= FVocabSize then + begin + FErrorProc('Token is bigger than vocab size:'+ IntToStr(CurrentToken)); + CurrentToken := 0; + end; + if FEncodeZero or (CurrentToken>0) then + begin + FInputTokens[CntToken] := CurrentToken; + SourcePtr := LocalWeights.GetRawPtr(CurrentToken, 0, 0); + SourcePtrPos := FPositionalEmbedding.GetRawPtr(CntToken, 0, 0); + DestPtr := FOutput.GetRawPtr(CntToken, 0, 0); + // TODO: replace this call by a copy function. 
+ TNNetVolume.MulAdd(DestPtr, SourcePtr, 1, FEmbeddingSize); + TNNetVolume.MulAdd(DestPtr, SourcePtrPos, 1, FEmbeddingSize); + end; + end; + FForwardTime := FForwardTime + (Now() - StartTime); +end; + procedure TNNetInputBase.Compute; begin FOutputError.Fill(0); @@ -10186,25 +11679,38 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; var S, S2: TStringList; St: array [0..csNNetMaxParameterIdx] of integer; + Ft: array [0..csNNetMaxParameterIdx] of TNeuralFloat; aL: array of TNNetLayer; aIdx: TNeuralIntegerArray; IdxCnt: integer; I: integer; + ClassNameStr: string; + SCount: integer; + fmt : TFormatSettings; begin Result := nil; S := CreateTokenizedStringList(strData,':'); S2 := CreateTokenizedStringList(strData,';'); - if S.Count >= 2 then + fmt := GetDefaultNumericFormat; + SCount := S.Count; + + if SCount >= 2 then begin + ClassNameStr := S[0]; + // This code is good for debug + // if ClassNameStr = 'TNNetSum' then + // begin + // WriteLn('hello'); + // end; + for I := Low(St) to High(St) do St[i] := 0; S2.DelimitedText := S[1]; if S2.Count > 0 then begin for I := 0 to Min(S2.Count - 1, High(St)) do St[I] := StrToInt(S2[I]); end; - - if S.Count = 3 then + if SCount >= 3 then begin S2.DelimitedText := S[2]; @@ -10225,13 +11731,33 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; end; end; + if SCount >= 4 then + begin + for I := Low(Ft) to High(Ft) do Ft[i] := 0; + S2.DelimitedText := S[3]; + if S2.Count > 0 then + begin + for I := 0 to Min(S2.Count - 1, High(St)) do Ft[I] := StrToFloat(S2[I], fmt); + end; + end + else + // backward compatibility + begin + for I := Low(Ft) to High(Ft) do Ft[i] := St[i]; + end; if S[0] = 'TNNetInput' then Result := TNNetInput.Create(St[0], St[1], St[2], St[3]) else if S[0] = 'TNNetIdentity' then Result := TNNetIdentity.Create() else + if S[0] = 'TNNetTransposeXD' then Result := TNNetTransposeXD.Create() else + if S[0] = 'TNNetTransposeYD' then Result := TNNetTransposeYD.Create() else if S[0] = 'TNNetDebug' then 
Result := TNNetDebug.Create(St[0], St[1]) else + if S[0] = 'TNNetDotProducts' then Result := TNNetDotProducts.Create(St[0], St[1]) else if S[0] = 'TNNetPad' then Result := TNNetPad.Create(St[0]) else + if S[0] = 'TNNetPadXY' then Result := TNNetPadXY.Create(St[0], St[1]) else + if S[0] = 'TNNetCrop' then Result := TNNetCrop.Create(St[0], St[1], St[2], St[3]) else if S[0] = 'TNNetIdentityWithoutBackprop' then Result := TNNetIdentityWithoutBackprop.Create() else if S[0] = 'TNNetReLU' then Result := TNNetReLU.Create() else + if S[0] = 'TNNetReLUP' then Result := TNNetReLUP.Create() else if S[0] = 'TNNetSwish' then Result := TNNetSwish.Create() else if S[0] = 'TNNetHardSwish' then Result := TNNetHardSwish.Create() else if S[0] = 'TNNetSwish6' then Result := TNNetSwish6.Create() else @@ -10256,18 +11782,18 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; if S[0] = 'TNNetLocalConnect' then Result := TNNetLocalConnect.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetLocalProduct' then Result := TNNetLocalProduct.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetLocalConnectReLU' then Result := TNNetLocalConnectReLU.Create(St[0], St[1], St[2], St[3], St[4]) else - if S[0] = 'TNNetMulLearning' then Result := TNNetMulLearning.Create(St[0]) else - if S[0] = 'TNNetMulByConstant' then Result := TNNetMulByConstant.Create(St[0]) else + if S[0] = 'TNNetMulLearning' then Result := TNNetMulLearning.Create(Ft[0]) else + if S[0] = 'TNNetMulByConstant' then Result := TNNetMulByConstant.Create(Ft[0]) else if S[0] = 'TNNetNegate' then Result := TNNetNegate.Create() else if S[0] = 'TNNetLayerSoftMax' then Result := TNNetSoftMax.Create() else - if S[0] = 'TNNetSoftMax' then Result := TNNetSoftMax.Create() else + if S[0] = 'TNNetSoftMax' then Result := TNNetSoftMax.Create(St[0]) else + if S[0] = 'TNNetPointwiseSoftMax' then Result := TNNetPointwiseSoftMax.Create(St[0]) else if S[0] = 'TNNetConvolution' then Result := TNNetConvolution.Create(St[0], 
St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetConvolutionReLU' then Result := TNNetConvolutionReLU.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetConvolutionGeLU' then Result := TNNetConvolutionGeLU.Create(St[0], St[1], St[2], St[3], St[4]) else - if S[0] = 'TNNetConvolutionSwish6' then Result := TNNetConvolutionSwish6.Create(St[0], St[1], St[2], St[3], St[4]) else + if S[0] = 'TNNetConvolutionLinear' then Result := TNNetConvolutionLinear.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetConvolutionSwish' then Result := TNNetConvolutionSwish.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetConvolutionHardSwish' then Result := TNNetConvolutionHardSwish.Create(St[0], St[1], St[2], St[3], St[4]) else - if S[0] = 'TNNetConvolutionLinear' then Result := TNNetConvolutionLinear.Create(St[0], St[1], St[2], St[3], St[4]) else if S[0] = 'TNNetGroupedConvolutionLinear' then Result := TNNetGroupedConvolutionLinear.Create(St[0], St[1], St[2], St[3], St[5], St[4]) else if S[0] = 'TNNetGroupedConvolutionReLU' then Result := TNNetGroupedConvolutionReLU.Create(St[0], St[1], St[2], St[3], St[5], St[4]) else if S[0] = 'TNNetGroupedPointwiseConvLinear' then Result := TNNetGroupedPointwiseConvLinear.Create({pNumFeatures=}St[0], {pGroups=}St[5], {pSuppressBias=}St[4]) else @@ -10282,6 +11808,7 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; if S[0] = 'TNNetPointwiseConvReLU' then Result := TNNetPointwiseConvReLU.Create(St[0], St[4]) else if S[0] = 'TNNetPointwiseConvLinear' then Result := TNNetPointwiseConvLinear.Create(St[0], St[4]) else if S[0] = 'TNNetMaxPool' then Result := TNNetMaxPool.Create(St[0], St[1], St[2]) else + if S[0] = 'TNNetMaxPoolWithPosition' then Result := TNNetMaxPoolWithPosition.Create(St[0], St[1], St[2], St[3], St[4], St[5]) else if S[0] = 'TNNetMaxPoolPortable' then Result := TNNetMaxPoolPortable.Create(St[0], St[1], St[2]) else if S[0] = 'TNNetMinPool' then Result := TNNetMinPool.Create(St[0], St[1], 
St[2]) else if S[0] = 'TNNetAvgPool' then Result := TNNetAvgPool.Create(St[0]) else @@ -10304,6 +11831,7 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; if S[0] = 'TNNetLayerMaxNormalization' then Result := TNNetLayerMaxNormalization.Create() else if S[0] = 'TNNetLayerStdNormalization' then Result := TNNetLayerStdNormalization.Create() else if S[0] = 'TNNetMovingStdNormalization' then Result := TNNetMovingStdNormalization.Create() else + if S[0] = 'TNNetMovingScale' then Result := TNNetMovingScale.Create(Ft[0],Ft[1]) else if S[0] = 'TNNetChannelStdNormalization' then Result := TNNetChannelStdNormalization.Create() else if S[0] = 'TNNetScaleLearning' then Result := TNNetChannelStdNormalization.Create() else if S[0] = 'TNNetChannelBias' then Result := TNNetChannelBias.Create() else @@ -10318,6 +11846,9 @@ function TNNet.CreateLayer(strData: string): TNNetLayer; if S[0] = 'TNNetLocalResponseNorm2D' then Result := TNNetLocalResponseNorm2D.Create(St[0]) else if S[0] = 'TNNetLocalResponseNormDepth' then Result := TNNetLocalResponseNormDepth.Create(St[0]) else if S[0] = 'TNNetAddAndDiv' then Result := TNNetAddAndDiv.Create(St[0], St[1]) else + if S[0] = 'TNNetAddPositionalEmbedding' then Result := TNNetAddPositionalEmbedding.Create(St[0]) else + if S[0] = 'TNNetEmbedding' then Result := TNNetEmbedding.Create(St[0], St[1], St[2], Ft[0]) else + if S[0] = 'TNNetTokenAndPositionalEmbedding' then Result := TNNetTokenAndPositionalEmbedding.Create(St[0], St[1], St[2], Ft[0], Ft[1], St[3]) else raise Exception.create(strData + ' not allowed in CreateLayer.'); end else @@ -10421,31 +11952,90 @@ function TNNet.AddSeparableConvLinear(pNumFeatures, pFeatureSize, Result := AddLayer( TNNetPointwiseConvLinear.Create(pNumFeatures, pSuppressBias) ); end; -function TNNet.AddGroupedConvolution(Conv2d: TNNetConvolutionClass; - Groups, pNumFeatures, pFeatureSize, pInputPadding, pStride: integer; - pSuppressBias: integer; ChannelInterleaving: boolean): TNNetLayer; +function 
TNNet.AddGroupedConvolution(Conv2d: TNNetConvolutionClass; + Groups, pNumFeatures, pFeatureSize, pInputPadding, pStride: integer; + pSuppressBias: integer; ChannelInterleaving: boolean): TNNetLayer; +var + PreviousLayer: TNNetLayer; + FeaturesPerGroup: integer; + InputChannelsPerGroup: integer; + EachGroupOutput: array of TNNetLayer; + GroupCnt: integer; +begin + if pInputPadding > 0 then + begin + PreviousLayer := AddLayer( TNNetPad.Create(pInputPadding) ); + end + else + begin + PreviousLayer := GetLastLayer(); + end; + Result := PreviousLayer; + SetLength(EachGroupOutput, Groups); + FeaturesPerGroup := pNumFeatures div Groups; + InputChannelsPerGroup := PreviousLayer.FOutput.Depth div Groups; + if Groups = 1 then + begin + Result := AddLayer( Conv2d.Create(FeaturesPerGroup, pFeatureSize, {pInputPadding=}0, pStride, pSuppressBias) ); + end; + if Groups > 1 then + begin + for GroupCnt := 0 to Groups - 1 do + begin + if ChannelInterleaving + then AddLayerAfter( TNNetSplitChannelEvery.Create(Groups, GroupCnt), PreviousLayer) + else AddLayerAfter( TNNetSplitChannels.Create(GroupCnt*InputChannelsPerGroup, InputChannelsPerGroup), PreviousLayer); + EachGroupOutput[GroupCnt] := AddLayer( Conv2d.Create(FeaturesPerGroup, pFeatureSize, {pInputPadding=}0, pStride, pSuppressBias) ); + end; + Result := AddLayer( TNNetDeepConcat.Create(EachGroupOutput) ); + end; + SetLength(EachGroupOutput, 0); +end; + +function TNNet.AddGroupedDotProducts(A, B: TNNetLayer; Groups: integer; ChannelInterleaving: boolean): TNNetLayer; var PreviousLayer: TNNetLayer; - FeaturesPerGroup: integer; InputChannelsPerGroup: integer; EachGroupOutput: array of TNNetLayer; GroupCnt: integer; begin - if pInputPadding > 0 then + PreviousLayer := GetLastLayer(); + Result := PreviousLayer; + SetLength(EachGroupOutput, Groups); + InputChannelsPerGroup := PreviousLayer.FOutput.Depth div Groups; + if Groups = 1 then begin - PreviousLayer := AddLayer( TNNetPad.Create(pInputPadding) ); - end - else + Result := 
AddLayer( TNNetDotProducts.Create(A, B) ); + end; + if Groups > 1 then begin - PreviousLayer := GetLastLayer(); + for GroupCnt := 0 to Groups - 1 do + begin + if ChannelInterleaving + then AddLayerAfter( TNNetSplitChannelEvery.Create(Groups, GroupCnt), PreviousLayer) + else AddLayerAfter( TNNetSplitChannels.Create(GroupCnt*InputChannelsPerGroup, InputChannelsPerGroup), PreviousLayer); + EachGroupOutput[GroupCnt] := AddLayer( TNNetDotProducts.Create(A, B) ); + end; + Result := AddLayer( TNNetDeepConcat.Create(EachGroupOutput) ); end; + SetLength(EachGroupOutput, 0); +end; + +function TNNet.AddGroupedPointwiseSoftMax(Groups: integer; + ChannelInterleaving: boolean): TNNetLayer; +var + PreviousLayer: TNNetLayer; + InputChannelsPerGroup: integer; + EachGroupOutput: array of TNNetLayer; + GroupCnt: integer; +begin + PreviousLayer := GetLastLayer(); Result := PreviousLayer; SetLength(EachGroupOutput, Groups); - FeaturesPerGroup := pNumFeatures div Groups; InputChannelsPerGroup := PreviousLayer.FOutput.Depth div Groups; if Groups = 1 then begin - Result := AddLayer( Conv2d.Create(FeaturesPerGroup, pFeatureSize, {pInputPadding=}0, pStride, pSuppressBias) ); + Result := AddLayer( TNNetPointwiseSoftMax.Create() ); end; if Groups > 1 then begin @@ -10454,7 +12044,7 @@ function TNNet.AddGroupedConvolution(Conv2d: TNNetConvolutionClass; if ChannelInterleaving then AddLayerAfter( TNNetSplitChannelEvery.Create(Groups, GroupCnt), PreviousLayer) else AddLayerAfter( TNNetSplitChannels.Create(GroupCnt*InputChannelsPerGroup, InputChannelsPerGroup), PreviousLayer); - EachGroupOutput[GroupCnt] := AddLayer( Conv2d.Create(FeaturesPerGroup, pFeatureSize, {pInputPadding=}0, pStride, pSuppressBias) ); + EachGroupOutput[GroupCnt] := AddLayer( TNNetPointwiseSoftMax.Create() ); end; Result := AddLayer( TNNetDeepConcat.Create(EachGroupOutput) ); end; @@ -10860,6 +12450,11 @@ procedure TNNet.AddToWeightAverage(NewElement: TNNet; CurrentElementCount: integ 
MulMulAddWeights(CurrentElementCount/(CurrentElementCount+1), 1/(CurrentElementCount+1), NewElement); end; +function TNNet.GetFirstLayer: TNNetLayer; +begin + Result := FLayers[0]; +end; + function TNNet.AddLayerAfter(pLayer, pAfterLayer: TNNetLayer): TNNetLayer; begin if Assigned(pAfterLayer) then @@ -11484,6 +13079,84 @@ function TNNet.NormalizeMaxAbsoluteDelta(NewMax: TNeuralFloat): TNeuralFloat; end; end; +function TNNet.NormalizeMinAbsoluteDeltaPerLayer(MinDelta: TNeuralFloat + ): TNeuralFloat; +var + LayerCnt, LastLayerIdx: integer; + MaxAbsDelta: TNeuralFloat; +begin + LastLayerIdx := GetLastLayerIdx(); + Result := 1; + if FLayers.Count > 0 then + begin + for LayerCnt := 0 to LastLayerIdx do + begin + if not(FLayers[LayerCnt].LinkedNeurons) then + begin + MaxAbsDelta := FLayers[LayerCnt].GetMaxAbsoluteDelta(); + if (MaxAbsDelta < MinDelta) and (MaxAbsDelta > 0) then + begin + FLayers[LayerCnt].MulDeltas( MinDelta/MaxAbsDelta ); + Result := Max(Result, MinDelta/MaxAbsDelta ); + FMaxDeltaLayer := LayerCnt; + end; + end; + end; + end; +end; + +function TNNet.NormalizeMinMaxAbsoluteDeltaPerLayer(MinDelta, + MaxDelta: TNeuralFloat): TNeuralFloat; +var + LayerCnt, LastLayerIdx: integer; + MaxAbsDelta: TNeuralFloat; +begin + LastLayerIdx := GetLastLayerIdx(); + Result := 1; + if FLayers.Count > 0 then + begin + for LayerCnt := 0 to LastLayerIdx do + begin + if not(FLayers[LayerCnt].LinkedNeurons) then + begin + MaxAbsDelta := FLayers[LayerCnt].GetMaxAbsoluteDelta(); + if (MaxAbsDelta > 0) then + begin + if (MaxAbsDelta < MinDelta) then + begin + FLayers[LayerCnt].MulDeltas( MinDelta/MaxAbsDelta ); + //WriteLn(LayerCnt, ' Force Min:', (MinDelta/MaxAbsDelta):8:4); + end + else if (MaxAbsDelta > MaxDelta) then + begin + FLayers[LayerCnt].MulDeltas( MaxDelta/MaxAbsDelta ); + Result := Min(Result, MaxDelta/MaxAbsDelta ); + FMaxDeltaLayer := LayerCnt; + //WriteLn(LayerCnt, ' Force Max:', (MaxDelta/MaxAbsDelta):8:4); + end; + end; + end; + end; + end; +end; + +procedure 
TNNet.NormalizeMaxAbsoluteDeltaPerNeuron(MaxDelta: TNeuralFloat); +var + LayerCnt, LastLayerIdx: integer; +begin + LastLayerIdx := GetLastLayerIdx(); + if FLayers.Count > 0 then + begin + for LayerCnt := 0 to LastLayerIdx do + begin + if not(FLayers[LayerCnt].LinkedNeurons) then + begin + FLayers[LayerCnt].NormalizeMaxAbsoluteDeltaPerNeuron(MaxDelta); + end; + end; + end; +end; + procedure TNNet.ClearInertia(); var LayerCnt: integer; @@ -11631,6 +13304,26 @@ procedure TNNet.UpdateWeights(); end; end; +procedure TNNet.CalcAdamDelta(); +var + LayerCnt: integer; +begin + for LayerCnt := 0 to GetLastLayerIdx() do + begin + FLayers[LayerCnt].CalcAdamDelta(); + end; +end; + +procedure TNNet.UpdateWeightsAdam(); +var + LayerCnt: integer; +begin + for LayerCnt := 0 to GetLastLayerIdx() do + begin + FLayers[LayerCnt].UpdateWeightsAdam(); + end; +end; + procedure TNNet.ClearDeltas(); var LayerCnt: integer; @@ -11801,6 +13494,11 @@ procedure TNNet.DebugStructure(); if FLayers.Count > 1 then begin + WriteLn( + 'Has AVX: ', FLayers[0].Output.HasAVX, + ' Has AVX2: ', FLayers[0].Output.HasAVX2, + ' Has AVX512: ', FLayers[0].Output.HasAVX512 + ); for LayerCnt := 0 to GetLastLayerIdx() do begin WeightCount := FLayers[LayerCnt].CountWeights(); @@ -11824,6 +13522,10 @@ procedure TNNet.DebugStructure(); ' Weight Sum:', FLayers[LayerCnt].GetWeightSum():8:4, ' Bias Sum:', FLayers[LayerCnt].GetBiasSum():8:4 ); + {$IFDEF OpenCL} + if FLayers[LayerCnt].HasOpenCL then write(' H'); + if FLayers[LayerCnt].HasOpenCL and FLayers[LayerCnt].ShouldOpenCL then write(' OpenCl'); + {$ENDIF} if Assigned(FLayers[LayerCnt].PrevLayer) then begin @@ -11911,7 +13613,7 @@ procedure TNNet.MulAddWeights(Value: TNeuralFloat; Origin: TNNet); function TNNet.SaveDataToString(): string; var LayerCnt: integer; - S: TStringList; + S: TNNetStringList; begin S := CreateTokenizedStringList('!'); if FLayers.Count > 0 then @@ -11921,7 +13623,7 @@ function TNNet.SaveDataToString(): string; S.Add( 
FLayers[LayerCnt].SaveDataToString() ); end; end; - Result := S.DelimitedText; + Result := S.GetDelimitedTextFast(); S.Free; end; @@ -12043,29 +13745,14 @@ procedure TNNet.LoadFromFile(filename: string); end; function TNNet.Clone(): TNNet; -//var NNData: String; -//begin -// NNData := SaveToString(); -// -// Result := TNNet.Create; -// Result.LoadFromString(NNData); -//end; - -var i : integer; - layStruct : string; +var + NNData: String; begin - // it's not optimal but it covers the basis - layStruct := SaveStructureToString(); - - Result := TNNet.Create; - Result.LoadStructureFromString(layStruct); - - for i := 0 to FLayers.Count - 1 do - begin - // copy weights... basically reproduces LoadDataFromString but without all the overhead - Result.fLayers[i].Assign( FLayers[i] ); - Result.FLayers[i].AfterWeightUpdate; - end; + NNData := SaveStructureToString(); + Result := TNNet.Create; + Result.LoadStructureFromString(NNData); + Result.CopyWeights(Self); + NNData := ''; end; procedure TNNet.LoadDataFromString(strData: string); @@ -12133,7 +13820,7 @@ procedure TNNet.LoadDataFromFile(filename: string); begin FErrorProc ( - 'TNNet.LoadFromString - wrong number of arguments: ' + IntToStr(S.Count) + 'TNNet.LoadDataFromFile - wrong number of arguments: ' + IntToStr(S.Count) ); end; @@ -12596,6 +14283,27 @@ function TNNetLayer.InitSELU(Value: TNeuralFloat): TNNetLayer; Result := Self; end; +function TNNetLayer.InitAdam(Beta1, Beta2, Epsilon: TNeuralFloat): TNNetLayer; +var + Cnt: integer; +begin + FBeta1 := Beta1; + FBeta2 := Beta2; + FEpsilon := Epsilon; + FBeta1Decay := 1; + FBeta2Decay := 1; + + if (FNeurons.Count > 0) then + begin + for Cnt := 0 to FNeurons.Count-1 do + begin + FNeurons[Cnt].InitAdam(Self); + end; + AfterWeightUpdate(); + end; + Result := Self; +end; + procedure TNNetLayer.InitDefault(); begin InitGlorotBengioUniform(); @@ -12727,13 +14435,15 @@ function TNNetLayer.GetMaxAbsoluteDelta(): TNeuralFloat; var Cnt: integer; MaxValue: TNeuralFloat; + 
MaxNeurons: integer; begin - if FNeurons.Count > 0 then + MaxNeurons := FNeurons.Count - 1; + if MaxNeurons >= 0 then begin Result := FNeurons[0].Delta.GetMaxAbs(); - if FNeurons.Count > 1 then + if MaxNeurons > 0 then begin - for Cnt := 0 to FNeurons.Count-1 do + for Cnt := 0 to MaxNeurons do begin MaxValue := FNeurons[Cnt].Delta.GetMaxAbs(); if MaxValue > Result then Result := MaxValue; @@ -12746,6 +14456,26 @@ function TNNetLayer.GetMaxAbsoluteDelta(): TNeuralFloat; end; end; +procedure TNNetLayer.NormalizeMaxAbsoluteDeltaPerNeuron(MaxDelta: TNeuralFloat); +var + Cnt: integer; + MaxValue: TNeuralFloat; + MaxNeurons: integer; +begin + MaxNeurons := FNeurons.Count - 1; + if MaxNeurons >= 0 then + begin + for Cnt := 0 to MaxNeurons do + begin + MaxValue := FNeurons[Cnt].Delta.GetMaxAbs(); + if (MaxDelta <> MaxValue) and (MaxValue>0) then + begin + FNeurons[Cnt].Delta.Mul(MaxDelta/MaxValue); + end; + end; + end +end; + function TNNetLayer.GetMinDelta(): TNeuralFloat; var Cnt: integer; @@ -13168,15 +14898,32 @@ procedure TNNetLayer.NormalizeWeights(VMax: TNeuralFloat); end; function TNNetLayer.SaveDataToString(): string; +//var +// S: TStringList; +// Cnt: integer; +//begin +// S := TStringList.Create; +// S.Sorted := false; +// S.Delimiter := '['; +// S.StrictDelimiter := true; + +// if FNeurons.Count > 0 then +// begin +// for Cnt := 0 to FNeurons.Count-1 do +// begin +// S.Add(FNeurons[Cnt].SaveToString()); +// end; +// end; + +// Result := S.DelimitedText; +// S.Free; +//end; var - S: TStringList; + S: TNNetStringList; Cnt: integer; begin - S := TStringList.Create; - S.Sorted := false; - S.Delimiter := '['; - S.StrictDelimiter := true; - + S := CreateTokenizedStringList('['); + S.SetCapacity(FNeurons.Count); if FNeurons.Count > 0 then begin for Cnt := 0 to FNeurons.Count-1 do @@ -13185,7 +14932,7 @@ function TNNetLayer.SaveDataToString(): string; end; end; - Result := S.DelimitedText; + Result := S.GetDelimitedTextFast(); S.Free; end; @@ -13222,14 +14969,24 @@ 
procedure TNNetLayer.LoadDataFromString(strData: string); function TNNetLayer.SaveStructureToString(): string; var I: integer; + fmt : TFormatSettings; begin Result := ClassName + ':'; + fmt := GetDefaultNumericFormat; for I := Low(FStruct) to High(FStruct) do begin if I > 0 then Result := Result + ';'; Result := Result + IntToStr(FStruct[I]); end; + + Result := Result + '::'; + + for I := Low(FFloatSt) to High(FFloatSt) do + begin + if I > 0 then Result := Result + ';'; + Result := Result + FloatToStr(FFloatSt[I], fmt); + end; end; procedure TNNetLayer.SetBatchUpdate(pBatchUpdate: boolean); @@ -13238,6 +14995,49 @@ procedure TNNetLayer.SetBatchUpdate(pBatchUpdate: boolean); end; procedure TNNetLayer.UpdateWeights(); +var + Cnt, MaxNeurons: integer; +begin + MaxNeurons := FNeurons.Count - 1; + if MaxNeurons >= 0 then + begin + if FInertia > 0 then + begin + for Cnt := 0 to MaxNeurons do + begin + FNeurons[Cnt].UpdateWeights(FInertia); + end; + end + else + begin + for Cnt := 0 to MaxNeurons do + begin + FNeurons[Cnt].UpdateWeightsWithoutInertia(); + end; + end; + end; + AfterWeightUpdate(); +end; + +procedure TNNetLayer.CalcAdamDelta(); +var + Cnt, MaxNeurons: integer; +begin + MaxNeurons := FNeurons.Count - 1; + if MaxNeurons >= 0 then + begin + FBeta1Decay := FBeta1Decay * FBeta1; + FBeta2Decay := FBeta2Decay * FBeta2; + FOneMinusBeta1Decay := (1 - FBeta1Decay); + FOneMinusBeta2Decay := (1 - FBeta2Decay); + for Cnt := 0 to MaxNeurons do + begin + FNeurons[Cnt].CalcAdamDelta(); + end; + end; +end; + +procedure TNNetLayer.UpdateWeightsAdam(); var Cnt, MaxNeurons: integer; begin @@ -13246,7 +15046,7 @@ procedure TNNetLayer.UpdateWeights(); begin for Cnt := 0 to MaxNeurons do begin - FNeurons[Cnt].UpdateWeights(FInertia); + FNeurons[Cnt].UpdateWeightsAdam(); end; end; AfterWeightUpdate(); @@ -13369,13 +15169,17 @@ constructor TNNetNeuron.Create(); FBiasDelta := 0; FWeights := TNNetVolume.Create(1,1,1); FBackInertia := TNNetVolume.Create(1,1,1); + FBackInertia2 := 
TNNetVolume.Create(1,1,1); FDelta := TNNetVolume.Create(1,1,1); + FDelta2 := TNNetVolume.Create(1,1,1); end; destructor TNNetNeuron.Destroy(); begin FDelta.Free; + FDelta2.Free; FBackInertia.Free; + FBackInertia2.Free; FWeights.Free; inherited Destroy(); end; @@ -13385,6 +15189,7 @@ procedure TNNetNeuron.InitUniform(Value: TNeuralFloat = 1); FWeights.InitUniform(Value); FBiasWeight := 0; FBackInertia.Fill(0); + FBackInertia2.Fill(0); FDelta.Fill(0); FBiasInertia := 0; FBiasDelta := 0; @@ -13395,6 +15200,7 @@ procedure TNNetNeuron.InitGaussian(Value: TNeuralFloat); FWeights.InitGaussian(Value); FBiasWeight := 0; FBackInertia.Fill(0); + FBackInertia2.Fill(0); FDelta.Fill(0); FBiasInertia := 0; FBiasDelta := 0; @@ -13456,6 +15262,16 @@ procedure TNNetNeuron.InitSELU(Value: TNeuralFloat); InitGaussian( Value * Sqrt(1/FWeights.Size) ); end; +procedure TNNetNeuron.InitAdam(ParentLayer: TNNetLayer); +begin + FBackInertia2.Resize(FBackInertia); + FDelta2.Resize(FDelta); + FBackInertia2.Fill(0); + FDelta2.Fill(0); + FBiasInertia2 := 0; + FParentLayer := ParentLayer; +end; + procedure TNNetNeuron.Fill(Value: TNeuralFloat); begin FWeights.Fill(Value) ; @@ -13474,13 +15290,79 @@ procedure TNNetNeuron.AddInertia(); // (BackInertia*Inertia) + (Delta*(1-Inertia)) procedure TNNetNeuron.UpdateWeights(Inertia:TNeuralFloat); begin - FBiasDelta := FBiasDelta * ( 1 - Inertia ); - FBiasInertia := FBiasInertia * Inertia; - FBiasInertia := FBiasInertia + FBiasDelta; - FBiasWeight := FBiasWeight + FBiasInertia; + if (Inertia>0) then + begin + FBiasDelta := FBiasDelta * ( 1 - Inertia ); + FBiasInertia := FBiasInertia * Inertia; + FBiasInertia := FBiasInertia + FBiasDelta; + FBiasWeight := FBiasWeight + FBiasInertia; - FBackInertia.MulMulAdd(Inertia, 1-Inertia, FDelta); - FWeights.Add(FBackInertia); + FBackInertia.MulMulAdd(Inertia, 1-Inertia, FDelta); + FWeights.Add(FBackInertia); + end + else + begin + FWeights.Add(FDelta); + FBiasWeight := FBiasWeight + FBiasDelta; + end; + ClearDelta(); 
+end; + +procedure TNNetNeuron.UpdateWeightsWithoutInertia(); +begin + FWeights.Add(FDelta); + FBiasWeight := FBiasWeight + FBiasDelta; + ClearDelta(); +end; + +// https://github.com/theroyakash/Adam +// https://github.com/theroyakash/Adam/blob/master/src/Screen%20Shot%202020-02-05%20at%2010.23.14%20PM.png +procedure TNNetNeuron.CalcAdamDelta(); +begin + // Weights Update + FDelta2.Copy(FDelta); + FDelta2.Mul(FDelta2); + + FBackInertia.MulMulAdd(FParentLayer.FBeta1, 1-FParentLayer.FBeta1, FDelta); + FBackInertia2.MulMulAdd(FParentLayer.FBeta2, 1-FParentLayer.FBeta2, FDelta2); + (* + if random(100)=00 then + WriteLn( + 'D1:', FDelta.GetMaxAbs():8:4, + ' D2:', FDelta2.GetMaxAbs():8:4, + ' I1:', FBackInertia.GetMaxAbs():8:4, + ' I2:', FBackInertia2.GetMaxAbs():8:4 + ); + *) + FDelta2.Copy(FBackInertia2); + FDelta2.Divi(FParentLayer.FOneMinusBeta2Decay); + FDelta2.VSqrt(); + FDelta2.Add(FParentLayer.FEpsilon); + + FDelta.Fill(FParentLayer.FLearningRate/FParentLayer.FOneMinusBeta1Decay); + FDelta.Mul(FBackInertia); + FDelta.Divi(FDelta2); + (* + if random(100)=00 then + WriteLn( + 'CALC D1:', FDelta.GetMaxAbs():8:4, + ' CALC D2:', FDelta2.GetMaxAbs():8:4 + ); + *) + + // Bias Update + FBiasInertia := FParentLayer.FBeta1 * FBiasInertia + (1 - FParentLayer.FBeta1) * FBiasDelta; + FBiasInertia2 := FParentLayer.FBeta2 * FBiasInertia2 + (1 - FParentLayer.FBeta2) * (FBiasDelta*FBiasDelta); + + FBiasDelta := + FParentLayer.FLearningRate*( (FBiasInertia/FParentLayer.FOneMinusBeta1Decay) / (sqrt(FBiasInertia2/FParentLayer.FOneMinusBeta2Decay)+FParentLayer.FEpsilon) ) ; +end; + +procedure TNNetNeuron.UpdateWeightsAdam(); +begin + // CalcAdamDelta() must be called before UpdateWeightsAdam; + FWeights.Add(FDelta); + FBiasWeight := FBiasWeight + FBiasDelta; ClearDelta(); end; @@ -13510,11 +15392,14 @@ procedure TNNetNeuron.Assign(neuron: TNNetNeuron); begin FWeights.Copy( neuron.fWeights ); FBackInertia.Copy(neuron.fBackInertia); + FBackInertia2.Copy(neuron.FBackInertia2); 
FDelta.Copy(neuron.FDelta); + FDelta2.Copy(neuron.FDelta2); FBiasWeight := neuron.fBiasWeight; FBiasInertia := neuron.FBiasInertia; FBiasDelta := neuron.FBiasDelta; + FBiasInertia2 := neuron.FBiasInertia2; end; constructor TEasyBytePredictionViaNNet.Create(pActionByteLen, @@ -14224,6 +16109,35 @@ procedure TNNetDataParallelism.SetItem(Index: Integer; AObject: TNNet); end; {$ENDIF} +{ TNNetAddNoiseBaseRnd } + +constructor TNNetAddNoiseBaseRnd.Create; +begin + inherited; + + {$IFDEF THREADSAVERANDOM} + fRandEng := TRandomGenerator.Create(raChaCha); + fRandeng.Init(0); + {$ENDIF} +end; + +destructor TNNetAddNoiseBaseRnd.Destroy; +begin + {$IFDEF THREADSAVERANDOM} + fRandEng.Free; + {$ENDIF} + inherited; +end; + +function TNNetAddNoiseBaseRnd.Random(range: integer): integer; +begin + {$IFDEF THREADSAVERANDOM} + Result := fRandEng.RandInt(range); + {$ELSE} + Result := System.Random(range); + {$ENDIF} +end; + {$IFNDEF Debug} initialization diff --git a/neural/neuralthread.pas b/neural/neuralthread.pas index cf6f5c0c..a2642ce0 100644 --- a/neural/neuralthread.pas +++ b/neural/neuralthread.pas @@ -56,7 +56,7 @@ interface uses Classes, SysUtils, {$IFDEF FPC} - fgl, MTPCPU + fgl {$IFNDEF WINDOWS} , MTPCPU {$ENDIF} {$IFDEF WINDOWS} ,windows {$ELSE} @@ -126,6 +126,7 @@ TNeuralThreadList = class (TObjectList<TNeuralThread>) procedure NeuralInitCriticalSection(var pCritSec: TRTLCriticalSection); procedure NeuralDoneCriticalSection(var pCritSec: TRTLCriticalSection); function GetProcessId(): {$IFDEF FPC}integer{$ELSE}integer{$ENDIF}; + procedure DebugThreadCount(); implementation @@ -173,9 +174,21 @@ procedure CreateNeuralThreadListIfRequired(); end; end; +{$UNDEF IMPORT_AFFINITAPI} +{$IFDEF MSWINDOWS} +{$IFNDEF FPC} {$IF CompilerVersion <= 23} +{$DEFINE IMPORT_AFFINITAPI} +{$IFEND} +{$ELSE} +{$DEFINE IMPORT_AFFINITAPI} +{$ENDIF} +{$ENDIF} + + +{$IFDEF IMPORT_AFFINITAPI} // delphi 2010 does not define the following functions and constants -{$IFDEF MSWINDOWS} + const 
ALL_PROCESSOR_GROUPS = $ffff; // @@ -203,7 +216,6 @@ function SetThreadGroupAffinity(hThread: THandle; var GroupAffinity: TGroupAffin function GetThreadGroupAffinity(hThread: THandle; var GroupAffinity: TGroupAffinity): ByteBool; stdcall; external kernel32 name 'GetThreadGroupAffinity'; function GetActiveProcessorGroupCount: WORD; stdcall; external kernel32 name 'GetThreadGroupAffinity'; {$ENDIF} -{$IFEND} function NeuralDefaultThreadCount: integer; begin @@ -238,6 +250,11 @@ function GetProcessId(): integer; end; {$ENDIF} +procedure DebugThreadCount; +begin + WriteLn('CPU threads reported by the operating system: ', NeuralDefaultThreadCount,'.'); +end; + function fNTL: TNeuralThreadList; begin Result := vNTL; diff --git a/neural/neuralvolume.pas b/neural/neuralvolume.pas index f7ce2819..8494e9be 100644 --- a/neural/neuralvolume.pas +++ b/neural/neuralvolume.pas @@ -111,7 +111,7 @@ TVolume = class(TObject) FSizeY: integer; FDepth: integer; FTag: array[0..1] of integer; - FFormatSettings: TFormatSettings; + FLastPos: integer; function GetTag: integer; {$IFDEF Release} inline; {$ENDIF} procedure SetTag(I: integer); {$IFDEF Release} inline; {$ENDIF} @@ -150,6 +150,9 @@ TVolume = class(TObject) procedure AddAtDepth(pDepth: integer; Value: T); overload; {$IFDEF Release} inline; {$ENDIF} procedure AddAtDepth(pDepth: integer; Original: TVolume); overload; {$IFDEF Release} inline; {$ENDIF} procedure AddFromDepthToDepth(Original: TVolume; FromDepth, ToDepth: integer); {$IFDEF Release} inline; {$ENDIF} + procedure AddTransposingXD(Original: TVolume); {$IFDEF Release} inline; {$ENDIF} + procedure AddTransposingYD(Original: TVolume); {$IFDEF Release} inline; {$ENDIF} + procedure AddTransposingAs2D(Original: TVolume); {$IFDEF Release} inline; {$ENDIF} procedure CopyFromDepthToDepth(Original: TVolume; FromDepth, ToDepth: integer); {$IFDEF Release} inline; {$ENDIF} procedure AddLayers(A,B: TVolume); overload; {$IFDEF Release} inline; {$ENDIF} procedure Sub(x, y, d: integer; 
Value: T); overload; {$IFDEF Release} inline; {$ENDIF} @@ -196,11 +199,21 @@ TVolume = class(TObject) procedure Copy(var Original: array of T); overload; procedure Copy(var Original: array of byte); overload; procedure Copy(Original: TBits; pFlase: T = -0.5; pTrue: T = +0.5); overload; - procedure CopyPadding(Original: TVolume; Padding: integer); {$IFDEF Release} inline; {$ENDIF} + procedure CopyPadding(Original: TVolume; Padding: integer); overload; {$IFDEF Release} inline; {$ENDIF} + procedure CopyPadding(Original: TVolume; PaddingX, PaddingY: integer); overload; {$IFDEF Release} inline; {$ENDIF} procedure CopyCropping(Original: TVolume; StartX, StartY, pSizeX, pSizeY: integer); procedure CopyResizing(Original: TVolume; NewSizeX, NewSizeY: integer); - procedure CopyNoChecks(Original: TVolume); {$IFDEF Release} inline; {$ENDIF} + procedure CopyNoChecks(Original: TVolume); overload; {$IFDEF Release} inline; {$ENDIF} + procedure CopyNoChecks(var Original: array of byte); overload; + procedure CopyNoChecksIntArr(var Original: array of integer); overload; + procedure CopyReversedNoChecksIntArr(var Original: array of integer); overload; + procedure CopyNoChecks(var Original: string); overload; + procedure CopyReversedNoChecks(var Original: string); overload; procedure CopyChannels(Original: TVolume; aChannels: array of integer); + // Transpose Copying + procedure CopyTransposingXD(Original: TVolume); + procedure CopyTransposingYD(Original: TVolume); + procedure CopyTransposingAs2D(Original: TVolume); procedure Define(Original: array of T); function DotProduct(Original: TVolume): T; overload; {$IFDEF Release} inline; {$ENDIF} class function DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: integer): Single; overload; @@ -232,6 +245,8 @@ TVolume = class(TObject) function GetSmallestIdxInRange(StartPos, Len: integer): integer; function GetStdDeviation(): T; {$IFDEF Release} inline; {$ENDIF} function GetMagnitude(): T; {$IFDEF Release} inline; {$ENDIF} + function 
GetEntropy(): T; + function GetPerplexity(): T; procedure FlipX(); procedure FlipY(); procedure IncTag(); {$IFDEF Release} inline; {$ENDIF} @@ -272,17 +287,31 @@ TVolume = class(TObject) procedure LoadFromString(strData: string); // bit operations - procedure CopyAsBits(var Original: array of byte; pFlase: T = -0.5; pTrue: T = +0.5); overload; + procedure CopyAsBits(var Original: array of byte; pFalse: T = -0.5; pTrue: T = +0.5; CanResize: boolean = True); overload; + procedure CopyAsBits(Original: string; pFalse: T = -0.5; pTrue: T = +0.5; CanResize: boolean = True); overload; + procedure CopyAsBitsReversed(Original: string; pFalse: T = -0.5; pTrue: T = +0.5); procedure ReadAsBits(var Dest: array of byte; Threshold: T = 0.0); - // Classification Functions + // Classification Functions (SetClass is similar to One Hot Encoding) procedure SetClass(pClass: integer; value: T); {$IFNDEF FPC} overload; {$ENDIF} procedure SetClass(pClass: integer; TrueValue, FalseValue: T); {$IFNDEF FPC} overload; {$ENDIF} procedure SetClassForHiperbolicTangent(pClass: integer); procedure SetClassForReLU(pClass: integer); procedure SetClassForSoftMax(pClass: integer); + // GetClass is similar to argmax function GetClass(): integer; + function GetClassOnPixel(X, Y: integer): integer; function SoftMax(): T; + procedure PointwiseSoftMax(); + + // Encoding Functions + procedure OneHotEncoding(aTokens: array of integer); overload; + procedure OneHotEncoding(aTokens: string); overload; + procedure OneHotEncodingReversed(aTokens: string); overload; + procedure OneHotEncodingReversed(var aTokens: array of integer); overload; + // Sets positional embedding as per paper "Attention Is All You Need". + // https://arxiv.org/abs/1706.03762 . 
+ procedure PositionalEncoding(n: integer = 10000); // Color Encoding Functions procedure RgbToHsv(); {$IFDEF Release} inline; {$ENDIF} @@ -308,8 +337,16 @@ TVolume = class(TObject) property SizeX: integer read FSizeX; property SizeY: integer read FSizeY; property Depth: integer read FDepth; + + end; + + TNNetToken = record + Token: Integer; + Score: TNeuralFloat; end; + TNNetTokenArray = array of TNNetToken; + { TNNetVolume } {$IFDEF FPC} TNNetVolume = class (specialize TVolume<TNeuralFloat>) @@ -323,9 +360,53 @@ TNNetVolume = class (TVolume) function GetMemSize(): integer; {$IFDEF Release} inline; {$ENDIF} procedure CalculateLocalResponseFrom2D(Original: TNNetVolume; pSize:integer; alpha, beta: TNeuralFloat ); procedure CalculateLocalResponseFromDepth(Original: TNNetVolume; pSize:integer; alpha, beta: TNeuralFloat ); + procedure GetTokenArray(var TokenArray: TNNetTokenArray); + (* + Assume that "As" and "Bs" contain lists of vectors "A" and "B". + "NumAs and NumBs" are the number of elements in the + The DotProducts function runs dot products for all combinations of "As" and "Bs". + "Convolutions" are "dot products". + Assume 3 matrixes 2x2 of the type TNNetVolume: A, B and B transposed (BT) + Assume c,d,e,f,x,y,z,w are of the type TNeuralFloat. + + These are the matrixes A, B and BT (B Transposed): + A B BT + c d x y x z + e f z w y w + + A = [c, d, e, f] + B = [x, y, z, w] + + a1 = [c, d] + a2 = [e, f] + + b1 = [x, y] + b2 = [z, w] + + bt1 = [x, z] + bt2 = [y, w] + + A = [a1 , a2] + B = [b1 , b2] + BT = [bt1, bt2] + + * denotes "dot product". + The result of DotProducts (2, 2, 2, A, B) will be: [a1* b1, a2* b1, a1* b2, a2* b2] + The result of a matrix multiplicaton would be: [a1*bt1, a1*bt2, a2*bt1, a2*bt2] + The result of DotProducts (2, 2, 2, A, BT)will be: [a1*bt1, a2*bt1, a1*bt2, a2*bt2] + The transposed result of DotProducts (2, 2, 4, A, BT) will be the same as a matrix multiplication AB. 
+ OR + Given that (A B)T = (BT AT), + The result of DotProducts (2, 2, 2, BT, A) is the same as a matrix multiplication AB. + This interpretation is valid for the functions: + * InterleavedDotProduct + * DotProducts + * DotProductsTiled + *) procedure InterleavedDotProduct(InterleavedAs, B:TNNetVolume); overload; procedure InterleavedDotProduct(InterleavedAs, Bs:TNNetVolume; VectorSize: integer); overload; procedure DotProducts(NumAs, NumBs, VectorSize: integer; VAs, VBs: TNNetVolume); + procedure DotProductsPointwise(VAs, VBs: TNNetVolume); procedure DotProductsTiled(NumAs, NumBs, VectorSize: integer; VAs, VBs: TNNetVolume; TileSizeA, TileSizeB: integer); procedure GroupedDotProductsTiled(Groups, NumAs, NumBs, VectorSize: integer; VAs, VBs: TNNetVolume; TileSizeA, TileSizeB: integer); procedure AddArea(DestX, DestY, OriginX, OriginY, LenX, LenY: integer; Original: TNNetVolume); @@ -333,9 +414,16 @@ TNNetVolume = class (TVolume) function HasAVX2: boolean; function HasAVX512: boolean; function PearsonCorrelation(Y : TNNetVolume): TNeuralFloat; + // AddSumChannel adds the sum of each channel to the current 1D array. procedure AddSumChannel(Original: TNNetVolume); {$IFDEF Release} inline; {$ENDIF} + // AddSumSqrChannel is designed to compute the sum of the squares of elements + // channel-wise from Original and add this sum to the current volume. procedure AddSumSqrChannel(Original: TNNetVolume); {$IFDEF Release} inline; {$ENDIF} + // AddToChannels receives an 1D array (Original). Each element in Original + // will be summed to the entire XY 2D slice at the same depth. procedure AddToChannels(Original: TNNetVolume); {$IFDEF Release} inline; {$ENDIF} + // MulChannels receives an 1D array (Original). Each element in Original + // will multiply the entire XY 2D slice at the same depth. 
procedure MulChannels(Original: TNNetVolume); {$IFDEF Release} inline; {$ENDIF} procedure Mul(Original: TNNetVolume); overload; {$IFDEF Release} inline; {$ENDIF} procedure NormalizeMax(Value: TNeuralFloat); {$IFDEF Release} inline; {$ENDIF} @@ -364,7 +452,8 @@ TNNetVolume = class (TVolume) procedure Divi(Value: Single); overload; {$IFDEF Release} inline; {$ENDIF} procedure Copy(Original: TNNetVolume); overload; {$IFDEF Release} inline; {$ENDIF} procedure CopyRelu(Original: TNNetVolume); overload; {$IFDEF Release} inline; {$ENDIF} - procedure CopyPadding(Original: TNNetVolume; Padding: integer); + procedure CopyPadding(Original: TNNetVolume; Padding: integer); overload; + procedure CopyPadding(Original: TNNetVolume; PaddingX, PaddingY: integer); {$IFDEF Release} inline; {$ENDIF} overload; procedure CopyNoChecks(Original: TNNetVolume); function GetSum(): TNeuralFloat; override; function GetSumSqr(): TNeuralFloat; override; @@ -376,6 +465,42 @@ TNNetVolume = class (TVolume) DataPtr: TNeuralFloatArrPtr read FDataPtr; end; + { TNNetSamplerBase } + + TNNetSamplerBase = class(TObject) + protected + FTokenArr: TNNetTokenArray; + public + function GetToken(Origin: TNNetVolume): integer; virtual; abstract; + procedure SortTokenArray(); + destructor Destroy(); override; + end; + + { TNNetSamplerGreedy } + TNNetSamplerGreedy = class (TNNetSamplerBase) + public + function GetToken(Origin: TNNetVolume): integer; override; + end; + + { TNNetSamplerTopK } + TNNetSamplerTopK = class (TNNetSamplerBase) + protected + FTopK: integer; + public + constructor Create(TopK: integer); + function GetToken(Origin: TNNetVolume): integer; override; + end; + + { TNNetSamplerTopP } + TNNetSamplerTopP = class (TNNetSamplerBase) + protected + FTopP: TNeuralFloat; + public + constructor Create(TopP: TNeuralFloat); + function GetToken(Origin: TNNetVolume): integer; override; + end; + + /// Implements a pair of volumes TNNetVolumePair = class(TObject) protected @@ -425,6 +550,7 @@ TNNetVolumeList = 
class (TNNetList) function GetSum(): TNeuralFloat; function GetAvg(): TNeuralFloat; procedure AddValue(Value: TNeuralFloat); + procedure Mul(Value: TNeuralFloat); procedure Divi(Value: TNeuralFloat); function GetClosestId(Original: TNNetVolume; var MinDist: TNeuralFloat): integer; function GetManhattanClosestId(Original: TNNetVolume; var MinDist: TNeuralFloat): integer; @@ -498,18 +624,34 @@ TNNetStringList = class(TStringList) procedure KeepLast(Cnt: integer); procedure DeleteFirst(Cnt: integer); procedure DeleteLast(Cnt: integer); + procedure SetCapacity(NewCapacity: Integer); override; + function GetDelimitedTextFast: string; + procedure LoadLargeFile(Filename: string); end; { TStringListInt } TStringListInt = class(TNNetStringList) private + FTokenizer: TStringList; + FIntegerToStr: array of string; + function GetInteger(Index: Integer): PtrInt; {$IFDEF Release} inline; {$ENDIF} procedure PutInteger(Index: Integer; AValue: PtrInt); {$IFDEF Release} inline; {$ENDIF} public constructor Create; + destructor Destroy; override; + procedure SortByIntegerAsc; procedure SortByIntegerDesc; function AddInteger(const S: string; AValue: PtrInt): integer; {$IFDEF Release} inline; {$ENDIF} + function WordToIndex(pWord:string): integer; + function WordToInteger(pWord:string): integer; + function IntegerToWord(pInteger: integer): string; + procedure SaveCurrentPositionAndSort(); + procedure StringToIndexArray(pString: string; var IntArr: TNeuralIntegerArray); + procedure StringToIntegerArray(pString: string; var IntArr: TNeuralIntegerArray); + function IndexArrayToString(var IntArr: TNeuralIntegerArray): string; + function IntegerArrayToString(var IntArr: TNeuralIntegerArray): string; property Integers[Index: Integer]: PtrInt read GetInteger write PutInteger; end; @@ -601,20 +743,18 @@ TStringStringListVolume = class(TStringsObj) {$ENDIF} { TNNetDictionary } + // This class creates a dictionary where integers contains the frequency. 
TNNetDictionary = class(TStringListInt) protected - FTokenizer: TStringList; FMaxSize: integer; public constructor Create(pMaxSize: integer); - destructor Destroy; override; function AddWordToDictionary(pWord:string): boolean; function AddWordsToDictionary(pString:string): boolean; procedure AddWordFromCsvField(filename: string; fieldId: integer; SkipFirstLine: boolean = True; Separator:char = ','); procedure RemoveAllStringsWithLessThen(I:integer); - function WordToIndex(pWord:string): integer; procedure StringToVolume(pString: string; Volume: TNNetVolume); function VolumeToString(Volume: TNNetVolume; Threshold: TNeuralFloat = 0.2): string; procedure CsvToTStringVolumeList(filename: string; @@ -625,8 +765,8 @@ TNNetDictionary = class(TStringListInt) procedure LoadDictionaryFromFile(Filename: string; Separator:char = ','); end; - function CreateTokenizedStringList(str: string; c:char):TStringList; overload; - function CreateTokenizedStringList(c:char):TStringList; overload; + function CreateTokenizedStringList(str: string; c:char):TNNetStringList; overload; + function CreateTokenizedStringList(c:char):TNNetStringList; overload; function HiperbolicTangent(x: TNeuralFloat): TNeuralFloat; function HiperbolicTangentDerivative(x: TNeuralFloat): TNeuralFloat; @@ -687,8 +827,13 @@ TNNetDictionary = class(TStringListInt) function NeuralFloatToStr(V: TNeuralFloat): string; function NeuralStrToFloat(V: String): TNeuralFloat; + function GetLastChars(const InputStr: string; LenStr: Integer): string; + procedure TestTNNetVolume(); procedure TestKMeans(); + procedure TestAVX; + + function GetDefaultNumericFormat: TFormatSettings; implementation @@ -699,18 +844,25 @@ implementation {$DEFINE x64} {$ENDIF} -uses {$IFNDEF x64} Neural.AVX {$ELSE} Neural.AVXx64{$ENDIF}, neuralbit, - CPUFeatures; +uses {$IFNDEF x64} NeuralAVX {$ELSE} NeuralAVXx64{$ENDIF}, neuralbit, Math, + CPUFeatures, strutils; + +var locDataFmtSet : TFormatSettings; + +function GetDefaultNumericFormat: 
TFormatSettings; +begin + Result := locDataFmtSet; +end; -function CreateTokenizedStringList(str: string; c:char):TStringList; +function CreateTokenizedStringList(str: string; c:char):TNNetStringList; begin Result := CreateTokenizedStringList(c); Result.DelimitedText := str; end; -function CreateTokenizedStringList(c: char): TStringList; +function CreateTokenizedStringList(c: char): TNNetStringList; begin - Result := TStringList.Create; + Result := TNNetStringList.Create; Result.Sorted := false; Result.Delimiter := c; Result.StrictDelimiter := true; @@ -1319,6 +1471,23 @@ procedure WriteLnPassIfZero(x: TNeuralFloat; Tolerance: TNeuralFloat=0.0001); else WriteLn(' FAILED.'); end; +// https://machinelearningmastery.com/a-gentle-introduction-to-positional-encoding-in-transformer-models-part-1/ +// Expected result is: +// [[ 0. 1. 0. 1. ] +// [ 0.84147098 0.54030231 0.09983342 0.99500417] +// [ 0.90929743 -0.41614684 0.19866933 0.98006658] +// [ 0.14112001 -0.9899925 0.29552021 0.95533649]] +procedure TestTNNetVolumePositionalEncoding; +var + X: TNNetVolume; +begin + X := TNNetVolume.Create(4,1,4); + X.PositionalEncoding(100); + X.Print(); + X.Free; + readln; +end; + procedure TestTNNetVolume(); var TestSize: integer; @@ -1601,6 +1770,18 @@ function RectifiedLinearUnitDerivative(x: TNeuralFloat): TNeuralFloat; end; // paper: GAUSSIAN ERROR LINEAR UNITS (GELUS) Gimpel et al. 
2018 + +// https://www.musicdsp.org/en/latest/Other/178-reasonably-accurate-fastish-tanh-approximation.html + +function Tanh_fast(x : Single) : Single; inline; +var a,b:Single; +begin + x := x*2; + a := abs(x); + b := (6+a*(3+a)); + Result := (x*b)/(a*b+12); +end; + function GaussErrorLinUnit(x : TNeuralFloat) : TNeuralFloat; const cSqrt_2_pi = 0.797884560803; begin @@ -1612,7 +1793,8 @@ function GaussErrorLinUnit(x : TNeuralFloat) : TNeuralFloat; then Result := 0 else - Result := 0.5*x*(1 + tanh( cSqrt_2_pi*( x + 0.044715*x*x*x))); + // Result := 0.5*x*(1 + tanh( cSqrt_2_pi*( x + 0.044715*x*x*x))); + Result := 0.5*x*(1 + 2*Tanh_fast( cSqrt_2_pi*( x + 0.044715*x*x*x))); end; function GaussErrorLinUnitDerivative(x : TNeuralFloat) : TNeuralFloat; @@ -1736,9 +1918,103 @@ function HardSwishDerivative(x: TNeuralFloat): TNeuralFloat; end; end; -{$IFDEF FPC} +procedure QuickSortTokenArray(var A: TNNetTokenArray; iLo, iHi: Integer); +var + Lo, Hi: Integer; + Mid, T: TNNetToken; +begin + Lo := iLo; + Hi := iHi; + Mid := A[(Lo + Hi) div 2]; + repeat + while A[Lo].Score > Mid.Score do Inc(Lo); + while A[Hi].Score < Mid.Score do Dec(Hi); + if Lo <= Hi then + begin + T := A[Lo]; + A[Lo] := A[Hi]; + A[Hi] := T; + Inc(Lo); + Dec(Hi); + end; + until Lo > Hi; + if Hi > iLo then QuickSortTokenArray(A, iLo, Hi); + if Lo < iHi then QuickSortTokenArray(A, Lo, iHi); +end; + +{ TNNetSamplerTopP } + +constructor TNNetSamplerTopP.Create(TopP: TNeuralFloat); +begin + inherited Create(); + FTopP := TopP; +end; + +function TNNetSamplerTopP.GetToken(Origin: TNNetVolume): integer; +var + CumulativeSum: TNeuralFloat; + I, Threshold: Integer; +begin + Origin.GetTokenArray(FTokenArr); + SortTokenArray(); + CumulativeSum := 0; + Threshold := 0; + for I := Low(FTokenArr) to High(FTokenArr) do + begin + CumulativeSum := CumulativeSum + FTokenArr[i].Score; + if CumulativeSum > FTopP then + begin + Threshold := I; + Break; + end; + end; + + // Randomly select one of the top tokens within the threshold. 
+ if Threshold > 0 then + Result := FTokenArr[Random(Threshold)].Token + else + Result := FTokenArr[0].Token; // Fallback in case P is too low. +end; + +{ TNNetSamplerTopK } + +constructor TNNetSamplerTopK.Create(TopK: integer); +begin + inherited Create(); + FTopK := TopK; +end; + +function TNNetSamplerTopK.GetToken(Origin: TNNetVolume): integer; +begin + Origin.GetTokenArray(FTokenArr); + SortTokenArray(); + Result := FTokenArr[Random(FTopK)].Token; +end; + +{ TNNetSamplerBase } + +procedure TNNetSamplerBase.SortTokenArray; +begin + QuickSortTokenArray(FTokenArr, Low(FTokenArr), High(FTokenArr)); +end; + +destructor TNNetSamplerBase.Destroy; +begin + SetLength(FTokenArr, 0); + inherited Destroy; +end; + +{ TNNetSamplerGreedy } + +function TNNetSamplerGreedy.GetToken(Origin: TNNetVolume): integer; +begin + Result := Origin.GetClass(); +end; + { TStringStringList } +{$IFDEF FPC} + procedure TStringStringList.LoadFromCsv(filename: string; SkipFirstLine:boolean = true; KeyId: integer = -1; @@ -1904,6 +2180,88 @@ procedure TNNetStringList.DeleteLast(Cnt: integer); end; end; +procedure TNNetStringList.SetCapacity(NewCapacity: Integer); +begin + inherited SetCapacity(NewCapacity); +end; + +/// Helper function to check if a string contains any character from a set +// This function was coded by chatGPT4. +function StrHasChars(const Str: string; Strict: Boolean; const Chars: TSysCharSet): Boolean; +var + P: PChar; +begin + P := PChar(Str); + while (P^ <> #0) and (not CharInSet(P^, Chars) or Strict) do Inc(P); + Result := P^ <> #0; +end; + +// This function was coded by chatGPT4. +function TNNetStringList.GetDelimitedTextFast: string; +{$IFNDEF FPC} +begin + Result := DelimitedText; +end; +{$ELSE} +var + I: Integer; + S: String; + BreakChars: set of Char; + DoQuote: Boolean; + StringBuilder: TAnsiStringBuilder; +begin + CheckSpecialChars; + if StrictDelimiter then + BreakChars := [#0, QuoteChar, Delimiter] + else + BreakChars := [#0..' 
', QuoteChar, Delimiter]; + + StringBuilder := TAnsiStringBuilder.Create(); + try + for I := 0 to Count - 1 do + begin + S := Strings[I]; + DoQuote := AlwaysQuote; + if not DoQuote then + begin + // Quote strings that include BreakChars + DoQuote := StrHasChars(S, True, BreakChars); + end; + if DoQuote and (QuoteChar <> #0) then + StringBuilder.Append(AnsiQuotedStr(S, QuoteChar)) + else + StringBuilder.Append(S); + + if I < Count - 1 then + StringBuilder.Append(Delimiter); + end; + + // Quote empty string + if (StringBuilder.Length = 0) and (Count = 1) and (QuoteChar <> #0) then + StringBuilder.Append(QuoteChar).Append(QuoteChar); + + Result := StringBuilder.ToString; + finally + StringBuilder.Free; + end; +end; +{$ENDIF} + +procedure TNNetStringList.LoadLargeFile(Filename: string); +var + LargeFile: TextFile; + StrLine: string; +begin + AssignFile(LargeFile, Filename); + Reset(LargeFile); + while not Eof(LargeFile) do + begin + ReadLn(LargeFile, StrLine); + Self.Add(StrLine); + end; + CloseFile(LargeFile); +end; + {$IFDEF FPC} { TStringsObj } function TStringsObj.GetList(Index: Integer): TObj; @@ -2075,6 +2433,15 @@ constructor TStringListInt.Create; begin inherited Create; Self.OwnsObjects := false; + FTokenizer := CreateTokenizedStringList(' '); + SetLength(FIntegerToStr, 0); +end; + +destructor TStringListInt.Destroy; +begin + SetLength(FIntegerToStr, 0); + FTokenizer.Free; + inherited Destroy; end; procedure TStringListInt.SortByIntegerAsc; @@ -2102,15 +2469,8 @@ constructor TNNetDictionary.Create(pMaxSize: integer); Self.CaseSensitive := false; FMaxSize := pMaxSize; - - FTokenizer := CreateTokenizedStringList(' '); end; -destructor TNNetDictionary.Destroy; -begin - FTokenizer.Free; - inherited Destroy; -end; function TNNetDictionary.AddWordToDictionary(pWord: string): boolean; var @@ -2214,11 +2574,134 @@ procedure TNNetDictionary.RemoveAllStringsWithLessThen(I: integer); end; end; -function TNNetDictionary.WordToIndex(pWord: string): integer; +function 
TStringListInt.WordToIndex(pWord: string): integer; begin if not(Self.Find(pWord, Result)) then Result := -1; end; +function TStringListInt.WordToInteger(pWord: string): integer; +var + Position: integer; +begin + if Self.Find(pWord, Position) then + begin + Result := Integers[Position]; + end + else + begin + Result := -1; + end; +end; + +function TStringListInt.IntegerToWord(pInteger: integer): string; +begin + Result := FIntegerToStr[pInteger]; +end; + +procedure TStringListInt.StringToIndexArray(pString: string; + var IntArr: TNeuralIntegerArray); +var + WordCount: integer; + WordIndex: integer; +begin + FTokenizer.DelimitedText := pString; + + if FTokenizer.Count > 0 then + begin + SetLength(IntArr, FTokenizer.Count); + for WordCount := 0 to FTokenizer.Count - 1 do + begin + WordIndex := Self.WordToIndex(FTokenizer[WordCount]); + //WriteLn(WordIndex,':',FTokenizer[WordCount]); + if WordIndex >= 0 then + begin + IntArr[WordCount] := WordIndex; + end; + end; + end; +end; + +procedure TStringListInt.StringToIntegerArray(pString: string; + var IntArr: TNeuralIntegerArray); +var + WordCount: integer; + WordInteger: integer; +begin + FTokenizer.DelimitedText := pString; + + if FTokenizer.Count > 0 then + begin + SetLength(IntArr, FTokenizer.Count); + for WordCount := 0 to FTokenizer.Count - 1 do + begin + WordInteger := Self.WordToInteger(FTokenizer[WordCount]); + //WriteLn(WordIndex,':',FTokenizer[WordCount]); + if WordInteger >= 0 then + begin + IntArr[WordCount] := WordInteger; + end; + end; + end; +end; + +function TStringListInt.IndexArrayToString(var IntArr: TNeuralIntegerArray + ): string; +var + WordCount, WordMax: integer; + WordIndex: integer; +begin + Result := ''; + WordMax := Length(IntArr) - 1; + if WordMax >= 0 then + begin + for WordCount := 0 to WordMax do + begin + WordIndex := IntArr[WordCount]; + //WriteLn(WordIndex,':',FTokenizer[WordCount]); + if WordIndex >= 0 then + begin + Result := Result + Self[WordIndex]; + end; + end; + end; +end; + 
+function TStringListInt.IntegerArrayToString(var IntArr: TNeuralIntegerArray + ): string; +var + WordCount, WordMax: integer; + WordInteger: integer; +begin + Result := ''; + WordMax := Length(IntArr) - 1; + if WordMax >= 0 then + begin + for WordCount := 0 to WordMax do + begin + WordInteger := IntArr[WordCount]; + //WriteLn(WordIndex,':',FTokenizer[WordCount]); + if WordInteger >= 0 then + begin + Result := Result + FIntegerToStr[WordInteger]; + end; + end; + end; +end; + +procedure TStringListInt.SaveCurrentPositionAndSort(); +var + RowCnt: integer; +begin + SetLength(FIntegerToStr, Self.Count); + for RowCnt := 0 to Self.Count - 1 do + begin + Self.Integers[RowCnt] := RowCnt; + FIntegerToStr[RowCnt] := Self[RowCnt]; + end; + Self.Sort(); + Self.Sorted := true; +end; + procedure TNNetDictionary.StringToVolume(pString: string; Volume: TNNetVolume); var WordCount: integer; @@ -2620,6 +3103,19 @@ procedure TNNetVolumeList.AddValue(Value: TNeuralFloat); end; end; +procedure TNNetVolumeList.Mul(Value: TNeuralFloat); +var + I: integer; +begin + if (Count>0) then + begin + for I := 0 to Count - 1 do + begin + Self[I].Mul(Value); + end; + end; +end; + procedure TNNetVolumeList.Divi(Value: TNeuralFloat); var I: integer; @@ -2732,7 +3228,9 @@ procedure TNNetVolumeList.ConcatInto(V: TNNetVolume); TotalSize := Self.GetTotalSize(); if V.Size <> TotalSize then begin - V.ReSize(TotalSize,1,1); + if TotalSize = Count * Self[0].Size + then V.ReSize(Count,1,Self[0].Size) + else V.ReSize(TotalSize,1,1); end; CurrPos := 0; @@ -2983,9 +3481,6 @@ constructor TVolume.Create(pSizeX, pSizeY, pDepth: integer; c: T); ReSize(pSizeX, pSizeY, pDepth); Fill(c); ClearTag(); - - {$IFDEF FPC} FFormatSettings := DefaultFormatSettings; {$ENDIF} - FFormatSettings.DecimalSeparator := '.'; end; constructor TVolume.Create(pInput: array of T); @@ -3239,6 +3734,86 @@ procedure TVolume.AddFromDepthToDepth(Original: TVolume; FromDepth, end; end; +procedure TVolume.AddTransposingXD(Original: TVolume); +var 
+ CntX, CntY, CntD: integer; + MaxX, MaxY, MaxD: integer; +begin + ReSize(Original.Depth, Original.SizeY, Original.SizeX); + MaxX := FSizeX - 1; + MaxY := FSizeY - 1; + MaxD := FDepth - 1; + if MaxY > 0 then + begin + for CntX := 0 to MaxX do + begin + for CntY := 0 to MaxY do + begin + for CntD := 0 to MaxD do + begin + Add(CntX, CntY, CntD, Original[CntD, CntY, CntX]); + end; + end; + end; + end + else + begin + for CntX := 0 to MaxX do + begin + for CntD := 0 to MaxD do + begin + Add(CntX, 0, CntD, Original[CntD, 0, CntX]); + end; + end; + end; +end; + +procedure TVolume.AddTransposingYD(Original: TVolume); +var + CntX, CntY, CntD: integer; + MaxX, MaxY, MaxD: integer; +begin + ReSize(Original.SizeX, Original.Depth, Original.SizeY); + MaxX := FSizeX - 1; + MaxY := FSizeY - 1; + MaxD := FDepth - 1; + if MaxX > 0 then + begin + for CntX := 0 to MaxX do + begin + for CntY := 0 to MaxY do + begin + for CntD := 0 to MaxD do + begin + Add(CntX, CntY, CntD, Original[CntX, CntD, CntY]); + end; + end; + end; + end + else + begin + for CntY := 0 to MaxY do + begin + for CntD := 0 to MaxD do + begin + Add(0, CntY, CntD, Original[0, CntD, CntY]); + end; + end; + end; +end; + +procedure TVolume.AddTransposingAs2D(Original: TVolume); +var + OriginalSizeX, OriginalSizeY, OriginalDepth: integer; +begin + OriginalSizeX := Original.SizeX; + OriginalSizeY := Original.SizeY; + OriginalDepth := Original.Depth; + Original.ReSize(OriginalSizeX*OriginalSizeY, 1, OriginalDepth); + AddTransposingXD(Original); + Original.ReSize(OriginalSizeX, OriginalSizeY, OriginalDepth); +end; + procedure TVolume.CopyFromDepthToDepth(Original: TVolume; FromDepth, ToDepth: integer); var @@ -3921,21 +4496,18 @@ procedure TVolume.Resize(pSize: integer); end; procedure TVolume.ReSize(pSizeX, pSizeY, pDepth: integer); +var + NewSize: integer; begin - if - (FSizeX <> pSizeX) or - (FSizeY <> pSizeY) or - (FDepth <> pDepth) or - (Length(FData) = 0) then + NewSize := pSizeX * pSizeY * pDepth; + if (NewSize <> 
FSize) then begin - FSizeX := pSizeX; - FSizeY := pSizeY; - FDepth := pDepth; - - FSize := FSizeX * FSizeY * FDepth; - + FSize := NewSize; SetLength(FData, FSize); end; + FSizeX := pSizeX; + FSizeY := pSizeY; + FDepth := pDepth; end; procedure TVolume.ReSize(Original: TVolume); @@ -4077,6 +4649,81 @@ procedure TVolume.CopyNoChecks(Original: TVolume); Move(Original.FData[0], Self.FData[0], Self.Size * SizeOf(T)); end; +procedure TVolume.CopyNoChecks(var Original: array of byte); +var + I: integer; + vHigh: integer; +begin + if Length(Original) > 0 then + begin + vHigh := High(Original); + for I := 0 to vHigh do + begin + FData[I] := Original[I]; + end; + end; +end; + +procedure TVolume.CopyNoChecksIntArr(var Original: array of integer); +var + I: integer; + vHigh: integer; +begin + if Length(Original) > 0 then + begin + vHigh := High(Original); + for I := 0 to vHigh do + begin + FData[I] := Original[I]; + end; + end; +end; + +procedure TVolume.CopyReversedNoChecksIntArr(var Original: array of integer); +var + I: integer; + MaxLen: integer; +begin + MaxLen := Length(Original) - 1; + if MaxLen >= 0 then + begin + for I := 0 to MaxLen do + begin + FData[I] := Original[MaxLen - I]; + end; + end; +end; + +procedure TVolume.CopyNoChecks(var Original: string); +var + I: integer; + LenOriginal: integer; +begin + LenOriginal := Length(Original); + if LenOriginal > 0 then + begin + for I := 1 to LenOriginal do + begin + FData[I-1] := Ord(Original[I]); + end; + end; +end; + +procedure TVolume.CopyReversedNoChecks(var Original: string); +var + I: integer; + LenOriginal: integer; +begin + LenOriginal := Length(Original); + if LenOriginal > 0 then + begin + for I := 1 to LenOriginal do + begin + FData[I-1] := Ord(Original[LenOriginal - I + 1]); + end; + end; +end; + procedure TVolume.CopyChannels(Original: TVolume; aChannels: array of integer); var MaxX, MaxY: integer; @@ -4190,21 +4837,23 @@ procedure TVolume.Copy(Original: TBits; pFlase: T = -0.5; pTrue: T = +0.5); end; end; 
-procedure TVolume.CopyAsBits(var Original: array of byte; pFlase: T = -0.5; pTrue: T = +0.5); +procedure TVolume.CopyAsBits(var Original: array of byte; pFalse: T = -0.5; pTrue: T = +0.5; CanResize:boolean = True); var I: integer; vHigh: integer; + LenOriginal: integer; aTranslate: array [0..1] of T; begin - if Length(Original) > 0 then + LenOriginal := Length(Original); + if LenOriginal > 0 then begin - if (Length(Original)*8 <> Self.Size) then + if CanResize and (LenOriginal*8 <> Self.Size) then begin - Self.ReSize(Length(Original), 1, 8); + Self.ReSize(LenOriginal, 1, 8); end; - vHigh := Length(Original) * 8 - 1; - aTranslate[0] := pFlase; + vHigh := LenOriginal * 8 - 1; + aTranslate[0] := pFalse; aTranslate[1] := pTrue; for I := 0 to vHigh do @@ -4214,6 +4863,47 @@ procedure TVolume.CopyAsBits(var Original: array of byte; pFlase: T = -0.5; pTru end; end; +procedure TVolume.CopyAsBits(Original: string; pFalse: T; pTrue: T; CanResize:boolean); +var + AB: array of byte; + I: integer; + vHigh: integer; + LenOriginal: integer; +begin + LenOriginal := Length(Original); + if LenOriginal > 0 then + begin + SetLength(AB, LenOriginal); + vHigh := LenOriginal; + for I := 1 to vHigh do + begin + AB[I-1] := Min(Ord(Original[I]), 255); + end; + Self.CopyAsBits(AB, pFalse, pTrue, CanResize); + end; +end; + +procedure TVolume.CopyAsBitsReversed(Original: string; pFalse: T; pTrue: T); +var + AB: array of byte; + I: integer; + vHigh: integer; + LenOriginal: integer; +begin + LenOriginal := Length(Original); + if LenOriginal > 0 then + begin + SetLength(AB, LenOriginal); + vHigh := LenOriginal; + for I := 1 to vHigh do + begin + AB[I-1] := Min(Ord(Original[vHigh-I+1]), 255); + end; + Self.CopyAsBits(AB, pFalse, pTrue, False); + SetLength(AB, 0); + end; +end; + (* procedure TVolume.CopyPadding(Original: TVolume; Padding: integer); var @@ -4268,6 +4958,30 @@ procedure TVolume.CopyPadding(Original: TVolume; Padding: integer); end; end; +procedure TVolume.CopyPadding(Original: 
TVolume; PaddingX, PaddingY: integer); +var + CntY: integer; + NewSizeX, NewSizeY: integer; + MaxY: integer; + RowSize: integer; + SourceRawPos, DestRawPos: integer; +begin + NewSizeX := Original.SizeX + PaddingX * 2; + NewSizeY := Original.SizeY + PaddingY * 2; + MaxY := Original.SizeY - 1; + RowSize := Original.SizeX * Original.Depth * SizeOf(TNeuralFloat); + + Resize(NewSizeX, NewSizeY, Original.Depth); + Fill(0); + + for CntY := 0 to MaxY do + begin + SourceRawPos := Original.GetRawPos(0, CntY, 0); + DestRawPos := GetRawPos(PaddingX, CntY + PaddingY, 0); + Move(Original.FData[SourceRawPos], Self.FData[DestRawPos], RowSize); + end; +end; + procedure TVolume.CopyCropping(Original: TVolume; StartX, StartY, pSizeX, pSizeY: integer); var @@ -4316,18 +5030,98 @@ procedure TVolume.CopyResizing(Original: TVolume; NewSizeX, NewSizeY: integer); for CntX := 0 to MaxX do begin - OrigPosX := Min(OrigMaxX, Round(CntX / RatioX)); - for CntY := 0 to MaxY do + OrigPosX := Min(OrigMaxX, Round(CntX / RatioX)); + for CntY := 0 to MaxY do + begin + OrigPosY := Min(OrigMaxY, Round(CntY / RatioY)); + RawPostDest := GetRawPos(CntX, CntY); + RawPosSource := Original.GetRawPos(OrigPosX, OrigPosY); + Move(Original.FData[RawPosSource], FData[RawPostDest], MoveSizeBytes); + end; + end; + end; +end; + +procedure TVolume.CopyTransposingXD(Original: TVolume); +var + CntX, CntY, CntD: integer; + MaxX, MaxY, MaxD: integer; +begin + ReSize(Original.Depth, Original.SizeY, Original.SizeX); + MaxX := FSizeX - 1; + MaxY := FSizeY - 1; + MaxD := FDepth - 1; + if MaxY > 0 then + begin + for CntX := 0 to MaxX do + begin + for CntY := 0 to MaxY do + begin + for CntD := 0 to MaxD do + begin + Self[CntX, CntY, CntD] := Original[CntD, CntY, CntX]; + end; + end; + end; + end + else + begin + for CntX := 0 to MaxX do + begin + for CntD := 0 to MaxD do + begin + Self[CntX, 0, CntD] := Original[CntD, 0, CntX]; + end; + end; + end; +end; + +procedure TVolume.CopyTransposingYD(Original: TVolume); +var + CntX, 
CntY, CntD: integer; + MaxX, MaxY, MaxD: integer; +begin + ReSize(Original.SizeX, Original.Depth, Original.SizeY); + MaxX := FSizeX - 1; + MaxY := FSizeY - 1; + MaxD := FDepth - 1; + if MaxX > 0 then + begin + for CntX := 0 to MaxX do + begin + for CntY := 0 to MaxY do + begin + for CntD := 0 to MaxD do + begin + Self[CntX, CntY, CntD] := Original[CntX, CntD, CntY]; + end; + end; + end; + end + else + begin + for CntY := 0 to MaxY do + begin + for CntD := 0 to MaxD do begin - OrigPosY := Min(OrigMaxY, Round(CntY / RatioY)); - RawPostDest := GetRawPos(CntX, CntY); - RawPosSource := Original.GetRawPos(OrigPosX, OrigPosY); - Move(Original.FData[RawPosSource], FData[RawPostDest], MoveSizeBytes); + Self[0, CntY, CntD] := Original[0, CntD, CntY]; end; end; end; end; +procedure TVolume.CopyTransposingAs2D(Original: TVolume); +var + OriginalSizeX, OriginalSizeY, OriginalDepth: integer; +begin + OriginalSizeX := Original.SizeX; + OriginalSizeY := Original.SizeY; + OriginalDepth := Original.Depth; + Original.ReSize(OriginalSizeX*OriginalSizeY, 1, OriginalDepth); + CopyTransposingXD(Original); + Original.ReSize(OriginalSizeX, OriginalSizeY, OriginalDepth); +end; + function TVolume.DotProduct(Original: TVolume): T; begin {$IFDEF Debug} @@ -4781,6 +5575,29 @@ function TVolume.GetMagnitude(): T; Result := Sqrt( Aux ); end; +function TVolume.GetEntropy: T; +var + I, vHigh: integer; + vSum: TNeuralFloat; +begin + vSum := 0; + if FSize > 0 then + begin + vHigh := FSize - 1; + for I := 0 to vHigh do + begin + if FData[I] > 0 then // To avoid log(0) which is undefined + vSum := vSum + (FData[i] * log2(FData[i])); + end; + end; + Result := -vSum; +end; + +function TVolume.GetPerplexity: T; +begin + Result := Power(2, GetEntropy()); +end; + procedure TVolume.FlipX(); var iFrom, iTo: integer; @@ -4855,7 +5672,7 @@ procedure TVolume.ClearTag(); function TVolume.NeuralToStr(V: TNeuralFloat): string; begin - Result := FloatToStr(V, FFormatSettings); + Result := FloatToStr(V, 
locDataFmtSet); end; procedure TVolume.LoadNonZeroPosIntoTIntegerList(Ints: TIntegerList; @@ -5125,31 +5942,289 @@ function TVolume.GetClass(): integer; end; end; +function TVolume.GetClassOnPixel(X, Y: integer): integer; +var + I: integer; + vHigh: integer; + vMax: T; + Pos: integer; + Value: T; +begin + vHigh := Depth; + if (vHigh>0) then + begin + Result := 0; + Pos := GetRawPos(X, Y); + vMax := FData[Pos]; + for I := 1 to vHigh do + begin + Inc(Pos); + Value := FData[Pos]; + if Value > vMax then + begin + Result := I; + vMax := Value; + end; + end; + end else + begin + Result := -1; + end; +end; + function TVolume.SoftMax(): T; var I: integer; vHigh: integer; LocalValue: T; TotalSum: TNeuralFloat; - MaxValue: T; + MinValue, MaxValue: T; begin MaxValue := GetMax(); + if MaxValue <> 0 then Sub(MaxValue); + MinValue := GetMin(); + TotalSum := 0; - vHigh := High(FData); - for I := 0 to vHigh do + // forces range [-1000,0] + if MinValue <> 0 then + begin + if MinValue < -1000 then Mul( -1000/MinValue ); + vHigh := High(FData); + + for I := 0 to vHigh do + begin + // LocalValue := Exp( NeuronForceRange(FData[I] - MaxValue, 4000) ); + LocalValue := Exp( FData[I] ); + FData[I] := LocalValue; + TotalSum := TotalSum + FData[I]; + end; + + if TotalSum > 0 then + begin + Divi(TotalSum); + end; + end; + + Result := TotalSum; +end; + +procedure TVolume.PointwiseSoftMax; +var + I, StartPointPos: integer; + MaxX, MaxY, MaxD: integer; + CountX, CountY, CountD: integer; + MaxValue: T; + LocalValue: T; + TotalSum: TNeuralFloat; +begin + // TODO: This portion of code can be optimized + MaxX := FSizeX - 1; + MaxY := FSizeY - 1; + MaxD := FDepth - 1; + + if MaxD > 0 then + begin + for CountX := 0 to MaxX do + begin + for CountY := 0 to MaxY do + begin + StartPointPos := GetRawPos(CountX, CountY); + I := StartPointPos; + // Find the point max value. 
+ MaxValue := FData[I]; + for CountD := 1 to MaxD do + begin + Inc(I); + if FData[I] > MaxValue + then MaxValue := FData[I]; + end; + TotalSum := 0; + I := StartPointPos; + for CountD := 0 to MaxD do + begin + LocalValue := Exp( NeuronForceRange(FData[I] - MaxValue, 4000) ); + FData[I] := LocalValue; + TotalSum := TotalSum + LocalValue; + Inc(I); + end; + if TotalSum > 0 then + begin + I := StartPointPos; + for CountD := 0 to MaxD do + begin + FData[I] := FData[I] / TotalSum; + Inc(I); + end; + end; + end; + end; + end; +end; + +procedure TVolume.OneHotEncoding(aTokens: array of integer); +var + CntToken, MaxToken, Token: integer; +begin + MaxToken := Length(aTokens) - 1; + Self.Fill(0); + if MaxToken < SizeX then + begin + for CntToken := 0 to MaxToken do + begin + Token := aTokens[CntToken]; + if Token < FDepth then + begin + Self[CntToken, 0, Token] := 1; + end + else + begin + WriteLn('Token '+IntToStr(Token)+' is bigger than Depth '+IntToStr(FDepth)+' at OneHotEncoding.'); + end; + end; + end + else begin - LocalValue := Exp( NeuronForceRange(FData[I] - MaxValue, 4000) ); - FData[I] := LocalValue; - TotalSum := TotalSum + FData[I]; + WriteLn('Token length '+IntToStr(MaxToken + 1)+' is bigger than Size X '+IntToStr(SizeX)+' at OneHotEncoding.'); end; +end; - if TotalSum > 0 then +procedure TVolume.OneHotEncoding(aTokens: string); +var + CntToken, MaxToken, Token: integer; +begin + MaxToken := Length(aTokens); + Self.Fill(0); + if MaxToken <= SizeX then + begin + for CntToken := 1 to MaxToken do + begin + Token := Ord(aTokens[CntToken]); + if Token < FDepth then + begin + Self[CntToken-1, 0, Token] := 1; + end + end; + end + else begin - Divi(TotalSum); + WriteLn('Token length '+IntToStr(MaxToken + 1)+' is bigger than Size X '+IntToStr(SizeX)+' at OneHotEncodingReversed.'); end; +end; - Result := TotalSum; +function GetLastChars(const InputStr: string; LenStr: Integer): string; +begin + if Length(InputStr) > LenStr then + Result := Copy(InputStr, 
Length(InputStr) - LenStr + 1, LenStr) + else + Result := InputStr; +end; + +procedure TVolume.OneHotEncodingReversed(aTokens: string); +var + CntToken, MaxToken, Token: integer; + LocalTokens: string; +begin + MaxToken := Length(aTokens); + if MaxToken > SizeX then + begin + LocalTokens := GetLastChars(aTokens, SizeX); + MaxToken := Length(aTokens); + end + else + begin + LocalTokens := aTokens; + end; + Self.Fill(0); + if MaxToken > 0 then + begin + {$IFDEF DEBUG} + if Ord(LocalTokens[MaxToken]) < 2 then + begin + WriteLn('A string for prediction should not end with terminal symbol.'); + end; + if Ord(LocalTokens[1]) < 2 then + begin + WriteLn('A string for prediction should not start with terminal symbol.'); + end; + {$ENDIF} + if MaxToken <= SizeX then + begin + for CntToken := 1 to MaxToken do + begin + Token := Ord(LocalTokens[CntToken]); + if Token < FDepth then + begin + Self[MaxToken-CntToken, 0, Token] := 1; + end; + end; + end + else + begin + WriteLn('This should never happend. 
Token length '+IntToStr(MaxToken)+' is bigger than Size X '+IntToStr(SizeX)+' at OneHotEncodingReversed.'); + end; + end + else + begin + {$IFDEF DEBUG} + WriteLn('Zero len at OneHotEncodingReversed'); + {$ENDIF} + end; +end; + +procedure TVolume.OneHotEncodingReversed(var aTokens: array of integer); +var + CntToken, MaxToken, Token: integer; +begin + MaxToken := Length(aTokens) - 1; + Self.Fill(0); + if MaxToken < SizeX then + begin + for CntToken := 0 to MaxToken do + begin + Token := aTokens[CntToken]; + if Token < FDepth then + begin + Self[MaxToken-CntToken, 0, Token] := 1; + end + else + begin + WriteLn('Token '+IntToStr(Token)+' is bigger than Depth '+IntToStr(FDepth)+' at OneHotEncoding.'); + end; + end; + end + else + begin + WriteLn('Token length '+IntToStr(MaxToken + 1)+' is bigger than Size X '+IntToStr(SizeX)+' at OneHotEncoding.'); + end; +end; + +procedure TVolume.PositionalEncoding(n: integer); +var + Position: Integer; + divTerm: Double; + MaxX, MaxY, MaxDepth: integer; + CntX, CntY, CntDepth: integer; + EmbeddingSize: integer; +begin + EmbeddingSize := FDepth; + MaxX := FSizeX - 1; + MaxY := FSizeY - 1; + MaxDepth := FDepth - 1; + for CntDepth := 0 to MaxDepth do + begin + divTerm := Power(n, (2 * (CntDepth div 2)) / EmbeddingSize); + for CntX := 0 to MaxX do + begin + for CntY := 0 to MaxY do + begin + Position := CntY*FSizeX + CntX; + if CntDepth mod 2 = 0 + then Self[CntX, CntY, CntDepth] := Sin(Position / divTerm) + else Self[CntX, CntY, CntDepth] := Cos(Position / divTerm); + end; + end; + end; end; procedure TVolume.RgbToHsv(); @@ -5166,7 +6241,7 @@ procedure TVolume.RgbToHsv(); if Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5195,7 +6270,7 @@ procedure TVolume.HsvToRgb(); if Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5224,7 +6299,7 @@ procedure TVolume.RgbToHsl(); if Depth >= 3 then begin MaxX 
:= FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5252,7 +6327,7 @@ procedure TVolume.HslToRgb(); if Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5280,7 +6355,7 @@ procedure TVolume.RgbToLab(); if Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5309,7 +6384,7 @@ procedure TVolume.LabToRgb(); if Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5333,7 +6408,7 @@ procedure TVolume.RgbToGray(); if Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5357,7 +6432,7 @@ procedure TVolume.GetGrayFromRgb(Rgb: TVolume); if Rgb.Depth >= 3 then begin MaxX := FSizeX - 1; - MaxY := FSizeX - 1; + MaxY := FSizeY - 1; for I := 0 to MaxX do begin @@ -5563,17 +6638,14 @@ procedure TVolume.InitSELU(Value: T); function TVolume.SaveToString(): string; var - S: TStringList; + S: TNNetStringList; I: integer; version: integer; AuxFloat: Single; begin version := 1; - S := TStringList.Create; - S.Sorted := false; - S.Delimiter := ';'; - S.StrictDelimiter := true; - + S := CreateTokenizedStringList(';'); + S.SetCapacity(FSize+10); S.Add( IntToStr(version) ); S.Add( IntToStr(FSizeX) ); S.Add( IntToStr(FSizeY) ); @@ -5582,10 +6654,11 @@ function TVolume.SaveToString(): string; for I := Low(FData) to High(FData) do begin AuxFloat := FData[I]; - S.Add( FloatToStr(AuxFloat, FFormatSettings) ); + S.Add( FloatToStr(AuxFloat, locDataFmtSet) ); end; - Result := S.DelimitedText; + Result := S.GetDelimitedTextFast(); + //Result := S.DelimitedText; S.Free; end; @@ -5628,7 +6701,7 @@ procedure TVolume.LoadFromString(strData: string); begin for I := 4 to S.Count-1 do begin - AuxFloat := StrToFloat(S[I], FFormatSettings); + AuxFloat := StrToFloat(S[I], locDataFmtSet); FData[I-4] := AuxFloat; 
end; end; @@ -5759,6 +6832,22 @@ procedure TNNetVolume.CalculateLocalResponseFromDepth(Original: TNNetVolume; SqrElements.Free; end; +procedure TNNetVolume.GetTokenArray(var TokenArray: TNNetTokenArray); +var + I, vHigh: integer; +begin + if (Length(TokenArray) <> FSize) then SetLength(TokenArray, FSize); + if FSize > 0 then + begin + vHigh := FSize - 1; + for I := 0 to vHigh do + begin + TokenArray[I].Token:=I; + TokenArray[I].Score:=FData[I]; + end; + end; +end; + procedure TNNetVolume.InterleavedDotProduct(InterleavedAs, B: TNNetVolume); var @@ -5811,7 +6900,31 @@ procedure TNNetVolume.InterleavedDotProduct(InterleavedAs, Bs: TNNetVolume; Inc(CntBVectorSizePlusCntBPos); end; end; +end; + +procedure TNNetVolume.DotProductsPointwise(VAs, VBs: TNNetVolume); +var + VAsCount, VBsCount: integer; +begin + VAsCount := VAs.SizeX * VAs.SizeY; + VBsCount := VBs.SizeX * VBs.SizeY; + if (VAsCount*VBsCount <> FSize) then + begin + Resize(VBsCount, 1, VAsCount); + end; + if (VAs.Depth = VBs.Depth) then + begin + DotProducts(VAsCount, VBsCount, VAs.Depth, VAs, VBs); + end + else + begin + WriteLn( + 'TNNetVolume.DotProductsPointwise - Depths differ '+ + IntToStr(VAs.Depth) + ' ' + + IntToStr(VBs.Depth) + '.' + ); + end; end; procedure TNNetVolume.DotProducts(NumAs, NumBs, VectorSize: integer; VAs, VBs: TNNetVolume); @@ -6655,7 +7768,14 @@ procedure TNNetVolume.AddArea(DestX, DestY, OriginX, OriginY, LenX, PtrB := Original.GetRawPtr(OriginX, OriginY+CntY); Add(PtrA, PtrB, SizeXDepth); end; - end; + end + {$IFDEF Debug} + else + begin + WriteLn('Error at TNNetVolume.AddArea: depth size doesn''t match. 
', + Self.Depth, ' ',Original.Depth); + end + {$ENDIF}; end; // ########################################### @@ -9866,6 +10986,31 @@ procedure TNNetVolume.CopyPadding(Original: TNNetVolume; Padding: integer); end; end; +procedure TNNetVolume.CopyPadding(Original: TNNetVolume; PaddingX, PaddingY: integer + ); +var + CntY: integer; + NewSizeX, NewSizeY: integer; + MaxY: integer; + RowSize: integer; + SourceRawPos, DestRawPos: pointer; +begin + NewSizeX := Original.SizeX + PaddingX * 2; + NewSizeY := Original.SizeY + PaddingY * 2; + MaxY := Original.SizeY - 1; + RowSize := Original.SizeX * Original.Depth; + + Resize(NewSizeX, NewSizeY, Original.Depth); + Fill(0); + + for CntY := 0 to MaxY do + begin + SourceRawPos := Original.GetRawPtr(0, CntY, 0); + DestRawPos := GetRawPtr(PaddingX, CntY + PaddingY, 0); + asm_dword_copy; + end; +end; + procedure TNNetVolume.CopyNoChecks(Original: TNNetVolume); var SourceRawPos, DestRawPos: pointer; @@ -9993,6 +11138,68 @@ procedure TNNetVolumePairList.SetItem(Index: Integer; AObject: TNNetVolumePair); end; {$ENDIF} +procedure TestAVX; + function DotProd(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: integer) : single; + var i: Integer; + begin + Result := 0; + + for i := 0 to NumElements - 1 do + Result := Result + ptrA^[i]*ptrB^[i]; + end; + + procedure MulAdd( PtrA, PtrB: TNeuralFloatArrPtr; NumElements: integer; fact : single ); + var i : integer; + begin + for i := 0 to NumElements - 1 do + ptra^[i] := ptra^[i] + fact*PtrB^[i]; + end; +var a, b : Array[0..127] of single; + i: Integer; + c1, c2 : Array[0..127] of single; + j: Integer; + r1, r2 : single; +begin + // test the avx implementation for convolution and muldadd + for i := 0 to High(a) do + begin + a[i] := i/10; + b[i] := (i + 0.1)/5; + end; + + Writeln('AVX Dot Prod'); + for i := 1 to Length(a) do + begin + r1 := DotProd( @a[0], @b[0], i); + r2 := AVXDotProd( @a[0], @b[0], i ); + if not SameValue( r1, r2, 1e-3) then + begin + Writeln('Dot product failed @ index ' + 
IntToStr(i) ); + exit; + end; + end; + + Writeln('AVX MulAdd'); + for i := 1 to Length(a) do + begin + Move( a[0], c1[0], sizeof(c1)); + Move( a[0], c2[0], sizeof(c2)); + + MulAdd( @c1[0], @b[0], i, 0.2 ); + AVXMulAdd( @c2[0], @b[0], i, 0.2); + + for j := 0 to i - 1 do + begin + if not SameValue( c1[j], c2[j], 1e-5) then + begin + Writeln('MulAdd failed @ index ' + IntToStr(i) + ',' + IntToStr(j) ); + exit; + end; + end; + end; + +end; + // ########################################### // #### Initialize cpu set variables @@ -10002,4 +11209,15 @@ initialization locAVX := IsAVXPresent; locAVX2 := IsFMAPresent; locAVX512 := IsAVX512Present; + + {$IF DEFINED(FPC)} + locFmtSet := DefaultFormatSettings; + {$ELSE} + {$IF (CompilerVersion <= 21)} + GetLocaleFormatSettings(0, locDataFmtSet); + {$ELSE} + locDataFmtSet := TFormatSettings.Create; + {$IFEND} + {$IFEND} + locDataFmtSet.DecimalSeparator := '.'; end. diff --git a/neural/neuralvolumev.pas b/neural/neuralvolumev.pas index a2f712b2..4b1a130c 100644 --- a/neural/neuralvolumev.pas +++ b/neural/neuralvolumev.pas @@ -26,7 +26,7 @@ interface uses Classes, SysUtils, neuralvolume, {$IFDEF FPC}ExtCtrls, Graphics, LCLType, FPImage - {$ELSE} Windows, {$IF CompilerVersion >= 23} VCL.ExtCtrls, VCL.Graphics {$ELSE} ExtCtrls, Graphics {$ENDIF} {$ENDIF}; + {$ELSE} Windows, {$IF CompilerVersion >= 23} VCL.ExtCtrls, VCL.Graphics {$ELSE} ExtCtrls, Graphics {$IFEND} {$ENDIF}; /// saves a bitmap into a file from a handle HWND procedure SaveHandleToBitmap(OutputFileName: string; hWnd: HWND); From 244e2d015227e13c5cd39687ffcd4694808ca090 Mon Sep 17 00:00:00 2001 From: Rabatscher Michael <m.rabatscher@gmail.com> Date: Wed, 29 May 2024 14:23:38 +0200 Subject: [PATCH 12/13] Minor: Delphi release mode had a problem with the inline of MulAddPPVS --- neural/neuralvolume.pas | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/neural/neuralvolume.pas b/neural/neuralvolume.pas index 8494e9be..94b31972 100644 --- 
a/neural/neuralvolume.pas +++ b/neural/neuralvolume.pas @@ -118,7 +118,7 @@ TVolume = class(TObject) function GetTags(x: integer): integer; {$IFDEF Release} inline; {$ENDIF} procedure SetTags(x: integer; AValue: integer); {$IFDEF Release} inline; {$ENDIF} class procedure MulAddPPVS(PtrA, PtrB: TNeuralFloatArrPtr; Value: T; - pSize: integer); {$IFDEF Release} inline; {$ENDIF} + pSize: integer); {$IFDEF Release} {$IFDEF FPC} inline; {$ENDIF} {$ENDIF} public // FData was made public to allow other fast operations FData: array of T; From dceb90b88f3f9fe9f51750bf5730e4bad2246752 Mon Sep 17 00:00:00 2001 From: Michael Rabatscher <m.rabatscher@gmail.com> Date: Thu, 10 Oct 2024 16:26:04 +0200 Subject: [PATCH 13/13] AVX512 extensions (Muladd and dotprod) -> integrated dynamically into the framework -> fixed avx512 check in CPUFeatures.pas -> fixed some compiling problems with FPC --- examples/SelfTest/SelfTest.dpr | 10 +- neural/CPUFeatures.pas | 6 +- neural/Neural.AVX.pas | 112 -------- neural/Neural.AVXx64.pas | 120 --------- neural/NeuralAVX.pas | 438 +++++++++++++++++++++++++++++++ neural/NeuralAVXx64.pas | 464 +++++++++++++++++++++++++++++++++ neural/neuralavxcore.pas | 155 +++++++++++ neural/neuraldatasets.pas | 7 +- neural/neuralfit.pas | 2 +- neural/neuralopencl.pas | 9 +- neural/neuralopenclv.pas | 2 +- neural/neuralthread.pas | 32 +++ neural/neuralvolume.pas | 165 +++++++----- 13 files changed, 1206 insertions(+), 316 deletions(-) delete mode 100644 neural/Neural.AVX.pas delete mode 100644 neural/Neural.AVXx64.pas create mode 100644 neural/NeuralAVX.pas create mode 100644 neural/NeuralAVXx64.pas create mode 100644 neural/neuralavxcore.pas diff --git a/examples/SelfTest/SelfTest.dpr b/examples/SelfTest/SelfTest.dpr index 1938c065..b92db271 100644 --- a/examples/SelfTest/SelfTest.dpr +++ b/examples/SelfTest/SelfTest.dpr @@ -7,8 +7,6 @@ uses SysUtils, Math, CPUFeatures in '..\..\neural\CPUFeatures.pas', - Neural.AVX in '..\..\neural\Neural.AVX.pas', - Neural.AVXx64 
in '..\..\neural\Neural.AVXx64.pas', neuralab in '..\..\neural\neuralab.pas', neuralabfun in '..\..\neural\neuralabfun.pas', neuralbit in '..\..\neural\neuralbit.pas', @@ -25,9 +23,15 @@ uses neuralplanbuilder in '..\..\neural\neuralplanbuilder.pas', neuralthread in '..\..\neural\neuralthread.pas', neuralvolume in '..\..\neural\neuralvolume.pas', - neuralvolumev in '..\..\neural\neuralvolumev.pas'; + neuralvolumev in '..\..\neural\neuralvolumev.pas', + NeuralAVX in '..\..\neural\NeuralAVX.pas', + NeuralAVXx64 in '..\..\neural\NeuralAVXx64.pas', + neuralavxcore in '..\..\neural\neuralavxcore.pas'; begin + Writeln('Testing Delphi AVX...'); + TestAVX; + WriteLn('Testing Volumes API ...'); TestTNNetVolume(); TestKMeans(); diff --git a/neural/CPUFeatures.pas b/neural/CPUFeatures.pas index 666d3902..7dd4bbe8 100644 --- a/neural/CPUFeatures.pas +++ b/neural/CPUFeatures.pas @@ -208,7 +208,7 @@ procedure InitAVXOSSupportFlags; {$IFDEF FPC}assembler;{$ENDIF} cmp eax, $E6; //1110 0011 = zmm_ymm_xmm = (7 << 5) | (1 << 2) | (1 << 1); jne @@not_supported; {$IFDEF x64} - mov [rip + AVX512_OS_SUPPORT], 1; + mov Byte [rip + AVX512_OS_SUPPORT], 1; {$ELSE} mov AVX512_OS_SUPPORT, 1; {$ENDIF} @@ -218,7 +218,7 @@ procedure InitAVXOSSupportFlags; {$IFDEF FPC}assembler;{$ENDIF} cmp eax, $6; //1110 0011 = check for AVX os support (256bit) in a context switch jne @@endProc; {$IFDEF x64} - mov [rip + AVX_OS_SUPPORT], 1; + mov Byte [rip + AVX_OS_SUPPORT], 1; {$ELSE} mov AVX_OS_SUPPORT, 1; {$ENDIF} @@ -330,7 +330,7 @@ function IsAVXPresent : boolean; function IsAVX512Present : boolean; begin - Result := HW_AVX512F and AVX512_OS_SUPPORT; + Result := HW_AVX512F and AVX512_OS_SUPPORT and HW_AVX512VL; end; function IsFMAPresent : boolean; diff --git a/neural/Neural.AVX.pas b/neural/Neural.AVX.pas deleted file mode 100644 index c121e30f..00000000 --- a/neural/Neural.AVX.pas +++ /dev/null @@ -1,112 +0,0 @@ -unit Neural.AVX; - -// ########################################### -// #### 32 bit intel avx 
functions -// ########################################### - -interface - -{$IFDEF CPUX64} -{$DEFINE x64} -{$ENDIF} -{$IFDEF cpux86_64} -{$DEFINE x64} -{$ENDIF} -{$IFNDEF x64} - -function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} - -{$ENDIF} - -implementation - -{$IFNDEF x64} - -{$IFDEF FPC} {$ASMMODE intel} {$S-} {$ENDIF} - -function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; -// eax = x, edx = y, ecx = N -asm - // iters - imul ecx, -4; - - // helper registers for the mt1, mt2 and dest pointers - sub eax, ecx; - sub edx, ecx; - - {$IFDEF FPC}vxorpd ymm0, ymm0, ymm0;{$ELSE}db $C5,$FD,$57,$C0;{$ENDIF} - - // unrolled loop - @Loop1: - add ecx, 128; - jg @loopEnd1; - - {$IFDEF FPC}vmovupd ymm1, [eax + ecx - 128];{$ELSE}db $C5,$FD,$10,$4C,$08,$80;{$ENDIF} - {$IFDEF FPC}vmovupd ymm2, [edx + ecx - 128];{$ELSE}db $C5,$FD,$10,$54,$0A,$80;{$ENDIF} - {$IFDEF FPC}vmulps ymm1, ymm1, ymm2;{$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm1;{$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} - - {$IFDEF FPC}vmovupd ymm3, [eax + ecx - 96];{$ELSE}db $C5,$FD,$10,$5C,$08,$A0;{$ENDIF} - {$IFDEF FPC}vmovupd ymm4, [edx + ecx - 96];{$ELSE}db $C5,$FD,$10,$64,$0A,$A0;{$ENDIF} - {$IFDEF FPC}vmulps ymm3, ymm3, ymm4;{$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm3;{$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} - - {$IFDEF FPC}vmovupd ymm1, [eax + ecx - 64];{$ELSE}db $C5,$FD,$10,$4C,$08,$C0;{$ENDIF} - {$IFDEF FPC}vmovupd ymm2, [edx + ecx - 64];{$ELSE}db $C5,$FD,$10,$54,$0A,$C0;{$ENDIF} - {$IFDEF FPC}vmulps ymm1, ymm1, ymm2;{$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm1;{$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} - - {$IFDEF FPC}vmovupd ymm3, [eax + ecx - 32];{$ELSE}db $C5,$FD,$10,$5C,$08,$E0;{$ENDIF} - {$IFDEF FPC}vmovupd ymm4, [edx + ecx - 32];{$ELSE}db $C5,$FD,$10,$64,$0A,$E0;{$ENDIF} - {$IFDEF FPC}vmulps ymm3, ymm3, ymm4;{$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} - {$IFDEF 
FPC}vaddps ymm0, ymm0, ymm3;{$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} - - jmp @Loop1; - - @loopEnd1: - - {$IFDEF FPC}vextractf128 xmm2, ymm0, 1;{$ELSE}db $C4,$E3,$7D,$19,$C2,$01;{$ENDIF} - {$IFDEF FPC}vhaddps xmm0, xmm0, xmm2;{$ELSE}db $C5,$FB,$7C,$C2;{$ENDIF} - - sub ecx, 128; - jz @loop2End; - - // loop to get all fitting into an array of 4 - @Loop2: - add ecx, 16; - jg @Loop2End; - - {$IFDEF FPC}vmovupd xmm3, [eax + ecx - 16];{$ELSE}db $C5,$F9,$10,$5C,$08,$F0;{$ENDIF} - {$IFDEF FPC}vmovupd xmm4, [edx + ecx - 16];{$ELSE}db $C5,$F9,$10,$64,$0A,$F0;{$ENDIF} - {$IFDEF FPC}vmulps xmm3, xmm3, xmm4;{$ELSE}db $C5,$E0,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddps xmm0, xmm0, xmm3;{$ELSE}db $C5,$F8,$58,$C3;{$ENDIF} - jmp @Loop2; - - @Loop2End: - - // handle last 2 elements - sub ecx, 16; - jz @loop3End; - - @loop3: - add ecx, 4; - jg @loop3End; - - {$IFDEF FPC}vmovss xmm3, [eax + ecx - 4];{$ELSE}db $C5,$FA,$10,$5C,$08,$FC;{$ENDIF} - {$IFDEF FPC}vmovss xmm4, [edx + ecx - 4];{$ELSE}db $C5,$FA,$10,$64,$0A,$FC;{$ENDIF} - {$IFDEF FPC}vmulss xmm3, xmm3, xmm4;{$ELSE}db $C5,$E2,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddss xmm0, xmm0, xmm3;{$ELSE}db $C5,$FA,$58,$C3;{$ENDIF} - - jmp @loop3; - @loop3End: - - // build result - {$IFDEF FPC}vhaddps xmm0, xmm0, xmm0;{$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} - {$IFDEF FPC}vhaddps xmm0, xmm0, xmm0;{$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} - {$IFDEF FPC}vzeroupper;{$ELSE}db $C5,$F8,$77;{$ENDIF} - movss Result, xmm0; -end; - -{$ENDIF} - -end. 
diff --git a/neural/Neural.AVXx64.pas b/neural/Neural.AVXx64.pas deleted file mode 100644 index f4c1c870..00000000 --- a/neural/Neural.AVXx64.pas +++ /dev/null @@ -1,120 +0,0 @@ -unit Neural.AVXx64; - -// ########################################### -// #### 64 bit intel avx functions -// ########################################### - -interface - -{$IFDEF CPUX64} -{$DEFINE x64} -{$ENDIF} -{$IFDEF cpux86_64} -{$DEFINE x64} -{$ENDIF} -{$IFDEF x64} - -function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; {$IFDEF FPC}assembler;{$ENDIF} - -{$ENDIF} - -implementation - -{$IFDEF x64} - -{$IFDEF FPC} {$ASMMODE intel} {$S-} {$ENDIF} - -function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; -asm - {$IFDEF UNIX} - // Linux uses a diffrent ABI -> copy over the registers so they meet with winABI - // The parameters are passed in the following order: - // RDI, RSI, RDX, RCX, r8, r9 -> mov to RCX, RDX, R8, r9 - mov r8, rdx; - mov rdx, rsi; - mov rcx, rdi; - {$ENDIF} - - // iters - imul r8, -4; - - // helper registers for the mt1, mt2 and dest pointers - sub rcx, r8; - sub rdx, r8; - - {$IFDEF FPC}vxorpd ymm0, ymm0, ymm0;{$ELSE}db $C5,$FD,$57,$C0;{$ENDIF} - - // unrolled loop - @Loop1: - add r8, 128; - jg @loopEnd1; - - {$IFDEF FPC}vmovupd ymm1, [rcx + r8 - 128];{$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$80;{$ENDIF} - {$IFDEF FPC}vmovupd ymm2, [rdx + r8 - 128];{$ELSE}db $C4,$A1,$7D,$10,$54,$02,$80;{$ENDIF} - {$IFDEF FPC}vmulps ymm1, ymm1, ymm2;{$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm1;{$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} - - {$IFDEF FPC}vmovupd ymm3, [rcx + r8 - 96];{$ELSE}db $C4,$A1,$7D,$10,$5C,$01,$A0;{$ENDIF} - {$IFDEF FPC}vmovupd ymm4, [rdx + r8 - 96];{$ELSE}db $C4,$A1,$7D,$10,$64,$02,$A0;{$ENDIF} - {$IFDEF FPC}vmulps ymm3, ymm3, ymm4;{$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm3;{$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} - - {$IFDEF FPC}vmovupd ymm1, [rcx + r8 - 64];{$ELSE}db 
$C4,$A1,$7D,$10,$4C,$01,$C0;{$ENDIF} - {$IFDEF FPC}vmovupd ymm2, [rdx + r8 - 64];{$ELSE}db $C4,$A1,$7D,$10,$54,$02,$C0;{$ENDIF} - {$IFDEF FPC}vmulps ymm1, ymm1, ymm2;{$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm1;{$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} - - {$IFDEF FPC}vmovupd ymm3, [rcx + r8 - 32];{$ELSE}db $C4,$A1,$7D,$10,$5C,$01,$E0;{$ENDIF} - {$IFDEF FPC}vmovupd ymm4, [rdx + r8 - 32];{$ELSE}db $C4,$A1,$7D,$10,$64,$02,$E0;{$ENDIF} - {$IFDEF FPC}vmulps ymm3, ymm3, ymm4;{$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddps ymm0, ymm0, ymm3;{$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} - - jmp @Loop1; - - @loopEnd1: - - {$IFDEF FPC}vextractf128 xmm2, ymm0, 1;{$ELSE}db $C4,$E3,$7D,$19,$C2,$01;{$ENDIF} - {$IFDEF FPC}vhaddps xmm0, xmm0, xmm2;{$ELSE}db $C5,$FB,$7C,$C2;{$ENDIF} - - sub r8, 128; - jz @loop2End; - - // loop to get all fitting into an array of 4 - @Loop2: - add r8, 16; - jg @Loop2End; - - {$IFDEF FPC}vmovupd xmm3, [rcx + r8 - 16];{$ELSE}db $C4,$A1,$79,$10,$5C,$01,$F0;{$ENDIF} - {$IFDEF FPC}vmovupd xmm4, [rdx + r8 - 16];{$ELSE}db $C4,$A1,$79,$10,$64,$02,$F0;{$ENDIF} - {$IFDEF FPC}vmulps xmm3, xmm3, xmm4;{$ELSE}db $C5,$E0,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddps xmm0, xmm0, xmm3;{$ELSE}db $C5,$F8,$58,$C3;{$ENDIF} - jmp @Loop2; - - @Loop2End: - - // handle last 2 elements - sub r8, 16; - jz @loop3End; - - @loop3: - add r8, 4; - jg @loop3End; - - {$IFDEF FPC}vmovss xmm3, [rcx + r8 - 4];{$ELSE}db $C4,$A1,$7A,$10,$5C,$01,$FC;{$ENDIF} - {$IFDEF FPC}vmovss xmm4, [rdx + r8 - 4];{$ELSE}db $C4,$A1,$7A,$10,$64,$02,$FC;{$ENDIF} - {$IFDEF FPC}vmulss xmm3, xmm3, xmm4;{$ELSE}db $C5,$E2,$59,$DC;{$ENDIF} - {$IFDEF FPC}vaddss xmm0, xmm0, xmm3;{$ELSE}db $C5,$FA,$58,$C3;{$ENDIF} - - jmp @loop3; - @loop3End: - - // build result - {$IFDEF FPC}vhaddps xmm0, xmm0, xmm0;{$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} - {$IFDEF FPC}vhaddps xmm0, xmm0, xmm0;{$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} - {$IFDEF FPC}vzeroupper;{$ELSE}db $C5,$F8,$77;{$ENDIF} - movss Result, xmm0; -end; - -{$ENDIF} - 
-end. diff --git a/neural/NeuralAVX.pas b/neural/NeuralAVX.pas new file mode 100644 index 00000000..88f8140e --- /dev/null +++ b/neural/NeuralAVX.pas @@ -0,0 +1,438 @@ +unit NeuralAVX; + +// ########################################### +// #### 32 bit intel avx functions +// ########################################### + +interface + +{$IFDEF CPUX64} +{$DEFINE x64} +{$ENDIF} +{$IFDEF cpux86_64} +{$DEFINE x64} +{$ENDIF} +{$IFNDEF x64} + +{$DEFINE AVXSUP} // assembler support for AVX/FMA built in +{$IFNDEF FPC} +{$IF CompilerVersion<135} // delhi compiler bug prevents on AVX512 -> use a very future compiler version... +{$UNDEF AVXSUP} +{$IFEND} +{$ENDIF} + + +// performs Result = sum(x[i]*y[i]); +function AVX2DotProd( x : PSingle; y : PSingle; N : integer ) : single; {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} +function AVX512DotProd( x : PSingle; y : PSingle; N : integer ) : single; {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} + +// performs x[i] = x[i] + fact*y[i]; +procedure AVX2MulAdd( x : PSingle; y : PSingle; N : integer; const fact : single); {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} +procedure AVX512MulAdd( x : PSingle; y : PSingle; N : integer; const fact : single); {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} + +{$ENDIF} + +implementation + +{$IFNDEF x64} + +{$IFDEF FPC} {$ASMMODE intel} {$S-} {$ENDIF} + +function AVX2DotProd( x : PSingle; y : PSingle; N : integer ) : single; +// eax = x, edx = y, ecx = N +asm + // iters + imul ecx, -4; + + // helper registers for the x, y pointers + sub eax, ecx; + sub edx, ecx; + + {$IFDEF AVXSUP}vxorpd ymm0, ymm0, ymm0; {$ELSE}db $C5,$FD,$57,$C0;{$ENDIF} + + // unrolled loop + @Loop1: + add ecx, 128; + jg @loopEnd1; + + {$IFDEF AVXSUP}vmovupd ymm1, [eax + ecx - 128]; {$ELSE}db $C5,$FD,$10,$4C,$08,$80;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [edx + ecx - 128]; {$ELSE}db $C5,$FD,$10,$54,$0A,$80;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} + {$IFDEF 
AVXSUP}vaddps ymm0, ymm0, ymm1; {$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm3, [eax + ecx - 96]; {$ELSE}db $C5,$FD,$10,$5C,$08,$A0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm4, [edx + ecx - 96]; {$ELSE}db $C5,$FD,$10,$64,$0A,$A0;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm3, ymm3, ymm4; {$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm0, ymm0, ymm3; {$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [eax + ecx - 64]; {$ELSE}db $C5,$FD,$10,$4C,$08,$C0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [edx + ecx - 64]; {$ELSE}db $C5,$FD,$10,$54,$0A,$C0;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm0, ymm0, ymm1; {$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm3, [eax + ecx - 32]; {$ELSE}db $C5,$FD,$10,$5C,$08,$E0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm4, [edx + ecx - 32]; {$ELSE}db $C5,$FD,$10,$64,$0A,$E0;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm3, ymm3, ymm4; {$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm0, ymm0, ymm3; {$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} + + jmp @Loop1; + + @loopEnd1: + + {$IFDEF AVXSUP}vextractf128 xmm2, ymm0, 1; {$ELSE}db $C4,$E3,$7D,$19,$C2,$01;{$ENDIF} + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm2; {$ELSE}db $C5,$FB,$7C,$C2;{$ENDIF} + + sub ecx, 128; + jz @loop3End; + + // loop to get all fitting into an array of 4 + @Loop2: + add ecx, 16; + jg @Loop2End; + + {$IFDEF AVXSUP}vmovupd xmm3, [eax + ecx - 16]; {$ELSE}db $C5,$F9,$10,$5C,$08,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd xmm4, [edx + ecx - 16]; {$ELSE}db $C5,$F9,$10,$64,$0A,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmulps xmm3, xmm3, xmm4; {$ELSE}db $C5,$E0,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm0, xmm0, xmm3; {$ELSE}db $C5,$F8,$58,$C3;{$ENDIF} + jmp @Loop2; + + @Loop2End: + + // handle last 2 elements + sub ecx, 16; + jz @loop3End; + + @loop3: + add ecx, 4; + jg @loop3End; + + {$IFDEF AVXSUP}vmovss xmm3, [eax + ecx - 4]; {$ELSE}db $C5,$FA,$10,$5C,$08,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovss 
xmm4, [edx + ecx - 4]; {$ELSE}db $C5,$FA,$10,$64,$0A,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmulss xmm3, xmm3, xmm4; {$ELSE}db $C5,$E2,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddss xmm0, xmm0, xmm3; {$ELSE}db $C5,$FA,$58,$C3;{$ENDIF} + + jmp @loop3; + @loop3End: + + // build result + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm0; {$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm0; {$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF AVXSUP}vzeroupper; {$ELSE}db $C5,$F8,$77;{$ENDIF} + movss Result, xmm0; +end; + +function AVX512DotProd( x : PSingle; y : PSingle; N : integer ) : single; {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} +// eax = x, edx = y, ecx = N +asm + // iters + imul ecx, -4; + + // helper registers for the x, y pointers + sub eax, ecx; + sub edx, ecx; + + {$IFDEF AVXSUP}vxorps zmm5, zmm5, zmm5; {$ELSE}db $62,$F1,$54,$48,$57,$ED;{$ENDIF} + {$IFDEF AVXSUP}vxorps ymm0, ymm0, ymm0; {$ELSE}db $C5,$FC,$57,$C0;{$ENDIF} + + // unrolled loop + @Loop1: + add ecx, 256; + jg @loopEnd1; + + {$IFDEF AVXSUP}vmovupd zmm1, [eax + ecx - 256]; {$ELSE}db $62,$F1,$FD,$48,$10,$4C,$08,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [edx + ecx - 256]; {$ELSE}db $62,$F1,$FD,$48,$10,$54,$0A,$FC;{$ENDIF} + {$IFDEF AVXSUP}vfmadd231ps zmm5, zmm1, zmm2; {$ELSE}db $62,$F2,$75,$48,$B8,$EA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd zmm3, [eax + ecx - 192]; {$ELSE}db $62,$F1,$FD,$48,$10,$5C,$08,$FD;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm4, [edx + ecx - 192]; {$ELSE}db $62,$F1,$FD,$48,$10,$64,$0A,$FD;{$ENDIF} + {$IFDEF AVXSUP}vfmadd231ps zmm5, zmm3, zmm4; {$ELSE}db $62,$F2,$65,$48,$B8,$EC;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd zmm1, [eax + ecx - 128]; {$ELSE}db $62,$F1,$FD,$48,$10,$4C,$08,$FE;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [edx + ecx - 128]; {$ELSE}db $62,$F1,$FD,$48,$10,$54,$0A,$FE;{$ENDIF} + {$IFDEF AVXSUP}vfmadd231ps zmm5, zmm1, zmm2; {$ELSE}db $62,$F2,$75,$48,$B8,$EA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd zmm3, [eax + ecx - 64]; {$ELSE}db $62,$F1,$FD,$48,$10,$5C,$08,$FF;{$ENDIF} + 
{$IFDEF AVXSUP}vmovupd zmm4, [edx + ecx - 64]; {$ELSE}db $62,$F1,$FD,$48,$10,$64,$0A,$FF;{$ENDIF} + {$IFDEF AVXSUP}vfmadd231ps zmm5, zmm3, zmm4; {$ELSE}db $62,$F2,$65,$48,$B8,$EC;{$ENDIF} + + jmp @Loop1; + + @loopEnd1: + + sub ecx, 256; + jz @buildRes; + + @Loop2: + add ecx, 64; + jg @Loop2End; + + {$IFDEF AVXSUP}vmovupd zmm1, [eax + ecx - 64]; {$ELSE}db $62,$F1,$FD,$48,$10,$4C,$08,$FF;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [edx + ecx - 64]; {$ELSE}db $62,$F1,$FD,$48,$10,$54,$0A,$FF;{$ENDIF} + {$IFDEF AVXSUP}vfmadd231ps zmm5, zmm1, zmm2; {$ELSE}db $62,$F2,$75,$48,$B8,$EA;{$ENDIF} + jmp @Loop2; + + @Loop2End: + + sub ecx, 64; + jz @buildRes; + + // loop to get all fitting into an array of 4 + @Loop3: + add ecx, 16; + jg @Loop3End; + + {$IFDEF AVXSUP}vmovupd xmm3, [eax + ecx - 16]; {$ELSE}db $C5,$F9,$10,$5C,$08,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd xmm4, [edx + ecx - 16]; {$ELSE}db $C5,$F9,$10,$64,$0A,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmulps xmm3, xmm3, xmm4; {$ELSE}db $C5,$E0,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm0, xmm0, xmm3; {$ELSE}db $C5,$F8,$58,$C3;{$ENDIF} + jmp @Loop3; + + @Loop3End: + + // handle last 2 elements + sub ecx, 16; + jz @buildRes; + + @loop4: + add ecx, 4; + jg @loop4End; + + {$IFDEF AVXSUP}vmovss xmm3, [eax + ecx - 4]; {$ELSE}db $C5,$FA,$10,$5C,$08,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovss xmm4, [edx + ecx - 4]; {$ELSE}db $C5,$FA,$10,$64,$0A,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmulss xmm3, xmm3, xmm4; {$ELSE}db $C5,$E2,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddss xmm0, xmm0, xmm3; {$ELSE}db $C5,$FA,$58,$C3;{$ENDIF} + + jmp @loop4; + @loop4End: + @buildRes: + + // add result from the zmm register to xmm + {$IFDEF AVXSUP}VEXTRACTF32x4 xmm1, zmm5, 0; {$ELSE}db $62,$F3,$7D,$48,$19,$E9,$00;{$ENDIF} + {$IFDEF AVXSUP}VEXTRACTF32x4 xmm2, zmm5, 1; {$ELSE}db $62,$F3,$7D,$48,$19,$EA,$01;{$ENDIF} + {$IFDEF AVXSUP}VEXTRACTF32x4 xmm3, zmm5, 2; {$ELSE}db $62,$F3,$7D,$48,$19,$EB,$02;{$ENDIF} + {$IFDEF AVXSUP}VEXTRACTF32x4 xmm4, zmm5, 3; {$ELSE}db 
$62,$F3,$7D,$48,$19,$EC,$03;{$ENDIF} + + {$IFDEF AVXSUP}vaddps xmm1, xmm1, xmm2; {$ELSE}db $C5,$F0,$58,$CA;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm3, xmm3, xmm4; {$ELSE}db $C5,$E0,$58,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm5, xmm1, xmm3; {$ELSE}db $C5,$F0,$58,$EB;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm0, xmm0, xmm5; {$ELSE}db $C5,$F8,$58,$C5;{$ENDIF} + + // build result + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm0; {$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm0; {$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF AVXSUP}vzeroupper; {$ELSE}db $C5,$F8,$77;{$ENDIF} + movss Result, xmm0; +end; + +procedure AVX2MulAdd( x : PSingle; y : PSingle; N : integer; const fact : single); {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} +// eax = x, edx = y, ecx = N +asm + push esi; + + // broadcast factor to ymm0 + lea esi, fact; + {$IFDEF AVXSUP}vbroadcastss ymm0, fact; {$ELSE}db $C4,$E2,$7D,$18,$45,$08;{$ENDIF} + + // iters + imul ecx, -4; + + // helper registers for the x, y + sub eax, ecx; + sub edx, ecx; + + // unrolled loop + @Loop1: + add ecx, 128; + jg @loopEnd1; + + {$IFDEF AVXSUP}vmovupd ymm1, [eax + ecx - 128]; {$ELSE}db $C5,$FD,$10,$4C,$08,$80;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [edx + ecx - 128]; {$ELSE}db $C5,$FD,$10,$54,$0A,$80;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 128], ymm1; {$ELSE}db $C5,$FD,$11,$4C,$08,$80;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [eax + ecx - 96]; {$ELSE}db $C5,$FD,$10,$4C,$08,$A0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [edx + ecx - 96]; {$ELSE}db $C5,$FD,$10,$54,$0A,$A0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 96], ymm1; {$ELSE}db $C5,$FD,$11,$4C,$08,$A0;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, 
[eax + ecx - 64]; {$ELSE}db $C5,$FD,$10,$4C,$08,$C0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [edx + ecx - 64]; {$ELSE}db $C5,$FD,$10,$54,$0A,$C0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 64], ymm1; {$ELSE}db $C5,$FD,$11,$4C,$08,$C0;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [eax + ecx - 32]; {$ELSE}db $C5,$FD,$10,$4C,$08,$E0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [edx + ecx - 32]; {$ELSE}db $C5,$FD,$10,$54,$0A,$E0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 32], ymm1; {$ELSE}db $C5,$FD,$11,$4C,$08,$E0;{$ENDIF} + jmp @Loop1; + + @loopEnd1: + + sub ecx, 128; + jz @loop3End; + + // loop to get all fitting into an array of 4 + @Loop2: + add ecx, 16; + jg @Loop2End; + + {$IFDEF AVXSUP}vmovupd xmm1, [eax + ecx - 16]; {$ELSE}db $C5,$F9,$10,$4C,$08,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd xmm2, [edx + ecx - 16]; {$ELSE}db $C5,$F9,$10,$54,$0A,$F0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps xmm2, xmm2, xmm0; {$ELSE}db $C5,$E8,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm1, xmm1, xmm2; {$ELSE}db $C5,$F0,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 16], xmm1; {$ELSE}db $C5,$F9,$11,$4C,$08,$F0;{$ENDIF} + jmp @Loop2; + + @Loop2End: + + // handle last 2 elements + sub ecx, 16; + jz @loop3End; + + @loop3: + add ecx, 4; + jg @loop3End; + + {$IFDEF AVXSUP}vmovss xmm1, [eax + ecx - 4]; {$ELSE}db $C5,$FA,$10,$4C,$08,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovss xmm2, [edx + ecx - 4]; {$ELSE}db $C5,$FA,$10,$54,$0A,$FC;{$ENDIF} + + {$IFDEF AVXSUP}vmulss xmm2, xmm2, xmm0; {$ELSE}db $C5,$EA,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddss xmm1, xmm1, xmm2; {$ELSE}db $C5,$F2,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovss [eax + ecx - 4], xmm1; {$ELSE}db $C5,$FA,$11,$4C,$08,$FC;{$ENDIF} + + jmp 
@loop3; + @loop3End: + + {$IFDEF AVXSUP}vzeroupper; {$ELSE}db $C5,$F8,$77;{$ENDIF} + pop esi; +end; + +procedure AVX512MulAdd( x : PSingle; y : PSingle; N : integer; const fact : single); {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} +// eax = x, edx = y, ecx = N +asm + push esi; + + // broadcast factor to zmm0 + lea esi, fact; + {$IFDEF AVXSUP}vbroadcastss zmm0, fact; {$ELSE}db $62,$F2,$7D,$48,$18,$45,$02;{$ENDIF} + + // iters + imul ecx, -4; + + // helper registers for the x, y + sub eax, ecx; + sub edx, ecx; + + // unrolled loop + @Loop1: + add ecx, 256; + jg @loopEnd1; + + {$IFDEF AVXSUP}vmovupd zmm1, [eax + ecx - 256]; {$ELSE}db $62,$F1,$FD,$48,$10,$4C,$08,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [edx + ecx - 256]; {$ELSE}db $62,$F1,$FD,$48,$10,$54,$0A,$FC;{$ENDIF} + + {$IFDEF AVXSUP}vfmadd231ps zmm1, zmm2, zmm0; {$ELSE}db $62,$F2,$6D,$48,$B8,$C8;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 256], zmm1; {$ELSE}db $62,$F1,$FD,$48,$11,$4C,$08,$FC;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd zmm1, [eax + ecx - 192]; {$ELSE}db $62,$F1,$FD,$48,$10,$4C,$08,$FD;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [edx + ecx - 192]; {$ELSE}db $62,$F1,$FD,$48,$10,$54,$0A,$FD;{$ENDIF} + + {$IFDEF AVXSUP}vfmadd231ps zmm1, zmm2, zmm0; {$ELSE}db $62,$F2,$6D,$48,$B8,$C8;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 192], zmm1; {$ELSE}db $62,$F1,$FD,$48,$11,$4C,$08,$FD;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd zmm1, [eax + ecx - 128]; {$ELSE}db $62,$F1,$FD,$48,$10,$4C,$08,$FE;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [edx + ecx - 128]; {$ELSE}db $62,$F1,$FD,$48,$10,$54,$0A,$FE;{$ENDIF} + + {$IFDEF AVXSUP}vfmadd231ps zmm1, zmm2, zmm0; {$ELSE}db $62,$F2,$6D,$48,$B8,$C8;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 128], zmm1; {$ELSE}db $62,$F1,$FD,$48,$11,$4C,$08,$FE;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd zmm1, [eax + ecx - 64]; {$ELSE}db $62,$F1,$FD,$48,$10,$4C,$08,$FF;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [edx + ecx - 64]; {$ELSE}db $62,$F1,$FD,$48,$10,$54,$0A,$FF;{$ENDIF} + + {$IFDEF 
AVXSUP}vfmadd231ps zmm1, zmm2, zmm0; {$ELSE}db $62,$F2,$6D,$48,$B8,$C8;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 64], zmm1; {$ELSE}db $62,$F1,$FD,$48,$11,$4C,$08,$FF;{$ENDIF} + jmp @Loop1; + + @loopEnd1: + + sub ecx, 256; + jz @exitProc; + + @Loop2: + add ecx, 64; + jg @loopEnd2; + + {$IFDEF AVXSUP}vmovupd zmm1, [eax + ecx - 64]; {$ELSE}db $62,$F1,$FD,$48,$10,$4C,$08,$FF;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [edx + ecx - 64]; {$ELSE}db $62,$F1,$FD,$48,$10,$54,$0A,$FF;{$ENDIF} + + {$IFDEF AVXSUP}vfmadd231ps zmm1, zmm2, zmm0; {$ELSE}db $62,$F2,$6D,$48,$B8,$C8;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 64], zmm1; {$ELSE}db $62,$F1,$FD,$48,$11,$4C,$08,$FF;{$ENDIF} + jmp @Loop2; + + @LoopEnd2: + sub ecx, 64; + jz @exitProc; + + // loop to get all fitting into an array of 4 + @Loop3: + add ecx, 16; + jg @Loop3End; + + {$IFDEF AVXSUP}vmovupd xmm1, [eax + ecx - 16]; {$ELSE}db $C5,$F9,$10,$4C,$08,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd xmm2, [edx + ecx - 16]; {$ELSE}db $C5,$F9,$10,$54,$0A,$F0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps xmm2, xmm2, xmm0; {$ELSE}db $C5,$E8,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm1, xmm1, xmm2; {$ELSE}db $C5,$F0,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [eax + ecx - 16], xmm1; {$ELSE}db $C5,$F9,$11,$4C,$08,$F0;{$ENDIF} + jmp @Loop3; + + @Loop3End: + + // handle last 2 elements + sub ecx, 16; + jz @exitProc; + + @loop4: + add ecx, 4; + jg @loop4End; + + {$IFDEF AVXSUP}vmovss xmm1, [eax + ecx - 4]; {$ELSE}db $C5,$FA,$10,$4C,$08,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovss xmm2, [edx + ecx - 4]; {$ELSE}db $C5,$FA,$10,$54,$0A,$FC;{$ENDIF} + + {$IFDEF AVXSUP}vmulss xmm2, xmm2, xmm0; {$ELSE}db $C5,$EA,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddss xmm1, xmm1, xmm2; {$ELSE}db $C5,$F2,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovss [eax + ecx - 4], xmm1; {$ELSE}db $C5,$FA,$11,$4C,$08,$FC;{$ENDIF} + + jmp @loop4; + @loop4End: + @exitProc: + + {$IFDEF AVXSUP}vzeroupper; {$ELSE}db $C5,$F8,$77;{$ENDIF} + pop esi; +end; + +{$ENDIF} + +end. 
diff --git a/neural/NeuralAVXx64.pas b/neural/NeuralAVXx64.pas new file mode 100644 index 00000000..d9fa44a0 --- /dev/null +++ b/neural/NeuralAVXx64.pas @@ -0,0 +1,464 @@ +unit NeuralAVXx64; + +// ########################################### +// #### 64 bit intel avx functions +// ########################################### + +interface + +{$IFDEF CPUX64} +{$DEFINE x64} +{$ENDIF} +{$IFDEF cpux86_64} +{$DEFINE x64} +{$ENDIF} +{$IFDEF x64} + +{$DEFINE AVXSUP} // assembler support for AVX/FMA built in +{$IFNDEF FPC} +{$IF CompilerVersion<135} // delhi compiler bug prevents on AVX512 -> use a very future compiler version... +{$UNDEF AVXSUP} +{$IFEND} +{$ENDIF} + +// performs Result = sum(x[i]*y[i]); +function AVX2DotProd( x : PSingle; y : PSingle; N : integer ) : single; {$IFDEF FPC}assembler;{$ENDIF} +function AVX512DotProd( x : PSingle; y : PSingle; N : integer ) : single; {$IFDEF FPC} assembler; {$ENDIF} + +// performs x[i] = x[i] + fact*y[i]; +procedure AVX2MulAdd( x : PSingle; y : PSingle; N : integer; const fact : single); {$IFDEF FPC} assembler; {$ENDIF} +procedure AVX512MulAdd( x : PSingle; y : PSingle; N : integer; const fact : single); {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} + + +{$ENDIF} + +implementation + +{$IFDEF x64} + +{$IFDEF FPC} {$ASMMODE intel} {$S-} {$ENDIF} + +function AVX2DotProd( x : PSingle; y : PSingle; N : integer ) : single; +asm + {$IFDEF UNIX} + // Linux uses a diffrent ABI -> copy over the registers so they meet with winABI + // The parameters are passed in the following order: + // RDI, RSI, RDX, RCX, r8, r9 -> mov to RCX, RDX, R8, r9 + mov r8, rdx; + mov rdx, rsi; + mov rcx, rdi; + {$ENDIF} + + // iters + imul r8, -4; + + // helper registers for the mt1, mt2 and dest pointers + sub rcx, r8; + sub rdx, r8; + + {$IFDEF AVXSUP}vxorpd ymm0, ymm0, ymm0; {$ELSE}db $C5,$FD,$57,$C0;{$ENDIF} + + // unrolled loop + @Loop1: + add r8, 128; + jg @loopEnd1; + + {$IFDEF AVXSUP}vmovupd ymm1, [rcx + r8 - 128]; {$ELSE}db 
$C4,$A1,$7D,$10,$4C,$01,$80;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [rdx + r8 - 128]; {$ELSE}db $C4,$A1,$7D,$10,$54,$02,$80;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm0, ymm0, ymm1; {$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm3, [rcx + r8 - 96]; {$ELSE}db $C4,$A1,$7D,$10,$5C,$01,$A0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm4, [rdx + r8 - 96]; {$ELSE}db $C4,$A1,$7D,$10,$64,$02,$A0;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm3, ymm3, ymm4; {$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm0, ymm0, ymm3; {$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [rcx + r8 - 64]; {$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$C0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [rdx + r8 - 64]; {$ELSE}db $C4,$A1,$7D,$10,$54,$02,$C0;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$59,$CA;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm0, ymm0, ymm1; {$ELSE}db $C5,$FC,$58,$C1;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm3, [rcx + r8 - 32]; {$ELSE}db $C4,$A1,$7D,$10,$5C,$01,$E0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm4, [rdx + r8 - 32]; {$ELSE}db $C4,$A1,$7D,$10,$64,$02,$E0;{$ENDIF} + {$IFDEF AVXSUP}vmulps ymm3, ymm3, ymm4; {$ELSE}db $C5,$E4,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm0, ymm0, ymm3; {$ELSE}db $C5,$FC,$58,$C3;{$ENDIF} + + jmp @Loop1; + + @loopEnd1: + + {$IFDEF AVXSUP}vextractf128 xmm2, ymm0, 1; {$ELSE}db $C4,$E3,$7D,$19,$C2,$01;{$ENDIF} + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm2; {$ELSE}db $C5,$FB,$7C,$C2;{$ENDIF} + + sub r8, 128; + jz @loop3End; + + // loop to get all fitting into an array of 4 + @Loop2: + add r8, 16; + jg @Loop2End; + + {$IFDEF AVXSUP}vmovupd xmm3, [rcx + r8 - 16]; {$ELSE}db $C4,$A1,$79,$10,$5C,$01,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd xmm4, [rdx + r8 - 16]; {$ELSE}db $C4,$A1,$79,$10,$64,$02,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmulps xmm3, xmm3, xmm4; {$ELSE}db $C5,$E0,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm0, xmm0, xmm3; {$ELSE}db $C5,$F8,$58,$C3;{$ENDIF} + jmp 
@Loop2; + + @Loop2End: + + // handle last 2 elements + sub r8, 16; + jz @loop3End; + + @loop3: + add r8, 4; + jg @loop3End; + + {$IFDEF AVXSUP}vmovss xmm3, [rcx + r8 - 4]; {$ELSE}db $C4,$A1,$7A,$10,$5C,$01,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovss xmm4, [rdx + r8 - 4]; {$ELSE}db $C4,$A1,$7A,$10,$64,$02,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmulss xmm3, xmm3, xmm4; {$ELSE}db $C5,$E2,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddss xmm0, xmm0, xmm3; {$ELSE}db $C5,$FA,$58,$C3;{$ENDIF} + + jmp @loop3; + @loop3End: + + // build result + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm0; {$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm0; {$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF AVXSUP}vzeroupper; {$ELSE}db $C5,$F8,$77;{$ENDIF} + movss Result, xmm0; +end; + + +function AVX512DotProd( x : PSingle; y : PSingle; N : integer ) : single; {$IFDEF FPC} assembler; {$ENDIF} +asm + {$IFDEF UNIX} + // Linux uses a diffrent ABI -> copy over the registers so they meet with winABI + // The parameters are passed in the following order: + // RDI, RSI, RDX, RCX, r8, r9 -> mov to RCX, RDX, R8, r9 + mov r8, rdx; + mov rdx, rsi; + mov rcx, rdi; + {$ENDIF} + + // iters + imul r8, -4; + + // adjust pointers for reverse array access + sub rcx, r8; + sub rdx, r8; + + {$IFDEF AVXSUP}vxorpd ymm0, ymm0, ymm0; {$ELSE}db $C5,$FD,$57,$C0;{$ENDIF} + {$IFDEF AVXSUP}vxorps zmm5, zmm5, zmm5; {$ELSE}db $62,$F1,$54,$48,$57,$ED;{$ENDIF} + + // unrolled loop + @Loop1: + add r8, 256; + jg @loopEnd1; + + {$IFDEF AVXSUP}vmovupd zmm1, [rcx + r8 - 256]; {$ELSE}db $62,$B1,$FD,$48,$10,$4C,$01,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [rdx + r8 - 256]; {$ELSE}db $62,$B1,$FD,$48,$10,$54,$02,$FC;{$ENDIF} + {$IFDEF AVXSUP}vfmadd231ps zmm5, zmm1, zmm2; {$ELSE}db $62,$F2,$75,$48,$B8,$EA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd zmm3, [rcx + r8 - 192]; {$ELSE}db $62,$B1,$FD,$48,$10,$5C,$01,$FD;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm4, [rdx + r8 - 192]; {$ELSE}db $62,$B1,$FD,$48,$10,$64,$02,$FD;{$ENDIF} + {$IFDEF 
AVXSUP}vfmadd231ps zmm5, zmm3, zmm4; {$ELSE}db $62,$F2,$65,$48,$B8,$EC;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd zmm1, [rcx + r8 - 128]; {$ELSE}db $62,$B1,$FD,$48,$10,$4C,$01,$FE;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [rdx + r8 - 128]; {$ELSE}db $62,$B1,$FD,$48,$10,$54,$02,$FE;{$ENDIF} + {$IFDEF AVXSUP}vfmadd231ps zmm5, zmm1, zmm2; {$ELSE}db $62,$F2,$75,$48,$B8,$EA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd zmm3, [rcx + r8 - 64]; {$ELSE}db $62,$B1,$FD,$48,$10,$5C,$01,$FF;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm4, [rdx + r8 - 64]; {$ELSE}db $62,$B1,$FD,$48,$10,$64,$02,$FF;{$ENDIF} + {$IFDEF AVXSUP}vfmadd231ps zmm5, zmm3, zmm4; {$ELSE}db $62,$F2,$65,$48,$B8,$EC;{$ENDIF} + + jmp @Loop1; + + @loopEnd1: + + sub r8, 256; + jz @buildRes; + + @Loop2: + add r8, 64; + jg @loopEnd2; + + {$IFDEF AVXSUP}vmovupd zmm1, [rcx + r8 - 64]; {$ELSE}db $62,$B1,$FD,$48,$10,$4C,$01,$FF;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [rdx + r8 - 64]; {$ELSE}db $62,$B1,$FD,$48,$10,$54,$02,$FF;{$ENDIF} + {$IFDEF AVXSUP}vfmadd231ps zmm5, zmm1, zmm2; {$ELSE}db $62,$F2,$75,$48,$B8,$EA;{$ENDIF} + jmp @Loop2; + @loopEnd2: + + sub r8, 64; + jz @buildRes; + + // loop to get all fitting into an array of 4 + @Loop3: + add r8, 16; + jg @Loop3End; + + {$IFDEF AVXSUP}vmovupd xmm3, [rcx + r8 - 16]; {$ELSE}db $C4,$A1,$79,$10,$5C,$01,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd xmm4, [rdx + r8 - 16]; {$ELSE}db $C4,$A1,$79,$10,$64,$02,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmulps xmm3, xmm3, xmm4; {$ELSE}db $C5,$E0,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm0, xmm0, xmm3; {$ELSE}db $C5,$F8,$58,$C3;{$ENDIF} + jmp @Loop3; + @Loop3End: + + sub r8, 16; + jz @buildRes; + + // handle last elements + @loop4: + add r8, 4; + jg @loop4End; + + {$IFDEF AVXSUP}vmovss xmm3, [rcx + r8 - 4]; {$ELSE}db $C4,$A1,$7A,$10,$5C,$01,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovss xmm4, [rdx + r8 - 4]; {$ELSE}db $C4,$A1,$7A,$10,$64,$02,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmulss xmm3, xmm3, xmm4; {$ELSE}db $C5,$E2,$59,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddss xmm0, xmm0, xmm3; {$ELSE}db 
$C5,$FA,$58,$C3;{$ENDIF} + + jmp @loop4; + @loop4End: + @buildRes: + + // add result from the zmm register to xmm + {$IFDEF AVXSUP}VEXTRACTF32x4 xmm1, zmm5, 0; {$ELSE}db $62,$F3,$7D,$48,$19,$E9,$00;{$ENDIF} + {$IFDEF AVXSUP}VEXTRACTF32x4 xmm2, zmm5, 1; {$ELSE}db $62,$F3,$7D,$48,$19,$EA,$01;{$ENDIF} + {$IFDEF AVXSUP}VEXTRACTF32x4 xmm3, zmm5, 2; {$ELSE}db $62,$F3,$7D,$48,$19,$EB,$02;{$ENDIF} + {$IFDEF AVXSUP}VEXTRACTF32x4 xmm4, zmm5, 3; {$ELSE}db $62,$F3,$7D,$48,$19,$EC,$03;{$ENDIF} + + {$IFDEF AVXSUP}vaddps xmm1, xmm1, xmm2; {$ELSE}db $C5,$F0,$58,$CA;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm3, xmm3, xmm4; {$ELSE}db $C5,$E0,$58,$DC;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm5, xmm1, xmm3; {$ELSE}db $C5,$F0,$58,$EB;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm0, xmm0, xmm5; {$ELSE}db $C5,$F8,$58,$C5;{$ENDIF} + + // build result + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm0; {$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF AVXSUP}vhaddps xmm0, xmm0, xmm0; {$ELSE}db $C5,$FB,$7C,$C0;{$ENDIF} + {$IFDEF AVXSUP}vzeroupper; {$ELSE}db $C5,$F8,$77;{$ENDIF} + movss Result, xmm0; +end; + + +procedure AVX2MulAdd( x : PSingle; y : PSingle; N : integer; const fact : single); {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} +// eax = x, edx = y, ecx = N +asm + {$IFDEF UNIX} + // Linux uses a diffrent ABI -> copy over the registers so they meet with winABI + // The parameters are passed in the following order: + // RDI, RSI, RDX, RCX, r8, r9 -> mov to RCX, RDX, R8, r9 + mov r8, rdx; + mov rdx, rsi; + mov rcx, rdi; + {$ENDIF} + + // iters + imul r8, -4; + + // helper registers for the mt1, mt2 and dest pointers + sub rcx, r8; + sub rdx, r8; + + // broadcast factor to ymm0 + {$IFDEF AVXSUP}vbroadcastss ymm0, xmm3; {$ELSE}db $C4,$E2,$7D,$18,$C3;{$ENDIF} + + // unrolled loop + @Loop1: + add r8, 128; + jg @loopEnd1; + + {$IFDEF AVXSUP}vmovupd ymm1, [rcx + r8 - 128]; {$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$80;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [rdx + r8 - 128]; {$ELSE}db $C4,$A1,$7D,$10,$54,$02,$80;{$ENDIF} + + 
{$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 128], ymm1; {$ELSE}db $C4,$A1,$7D,$11,$4C,$01,$80;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [rcx + r8 - 96]; {$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$A0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [rdx + r8 - 96]; {$ELSE}db $C4,$A1,$7D,$10,$54,$02,$A0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 96], ymm1; {$ELSE}db $C4,$A1,$7D,$11,$4C,$01,$A0;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [rcx + r8 - 64]; {$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$C0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [rdx + r8 - 64]; {$ELSE}db $C4,$A1,$7D,$10,$54,$02,$C0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 64], ymm1; {$ELSE}db $C4,$A1,$7D,$11,$4C,$01,$C0;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd ymm1, [rcx + r8 - 32]; {$ELSE}db $C4,$A1,$7D,$10,$4C,$01,$E0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd ymm2, [rdx + r8 - 32]; {$ELSE}db $C4,$A1,$7D,$10,$54,$02,$E0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps ymm2, ymm2, ymm0; {$ELSE}db $C5,$EC,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps ymm1, ymm1, ymm2; {$ELSE}db $C5,$F4,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 32], ymm1; {$ELSE}db $C4,$A1,$7D,$11,$4C,$01,$E0;{$ENDIF} + jmp @Loop1; + + @loopEnd1: + + sub r8, 128; + jz @loop3End; + + // loop to get all fitting into an array of 4 + @Loop2: + add r8, 16; + jg @Loop2End; + + {$IFDEF AVXSUP}vmovupd xmm1, [rcx + r8 - 16]; {$ELSE}db $C4,$A1,$79,$10,$4C,$01,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd xmm2, [rdx + r8 - 16]; {$ELSE}db $C4,$A1,$79,$10,$54,$02,$F0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps xmm2, xmm2, xmm0; {$ELSE}db 
$C5,$E8,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm1, xmm1, xmm2; {$ELSE}db $C5,$F0,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 16], xmm1; {$ELSE}db $C4,$A1,$79,$11,$4C,$01,$F0;{$ENDIF} + jmp @Loop2; + + @Loop2End: + + // handle last 2 elements + sub r8, 16; + jz @loop3End; + + @loop3: + add r8, 4; + jg @loop3End; + + {$IFDEF AVXSUP}vmovss xmm1, [rcx + r8 - 4]; {$ELSE}db $C4,$A1,$7A,$10,$4C,$01,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovss xmm2, [rdx + r8 - 4]; {$ELSE}db $C4,$A1,$7A,$10,$54,$02,$FC;{$ENDIF} + + {$IFDEF AVXSUP}vmulss xmm2, xmm2, xmm0; {$ELSE}db $C5,$EA,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddss xmm1, xmm1, xmm2; {$ELSE}db $C5,$F2,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovss [rcx + r8 - 4], xmm1; {$ELSE}db $C4,$A1,$7A,$11,$4C,$01,$FC;{$ENDIF} + + jmp @loop3; + @loop3End: + + {$IFDEF AVXSUP}vzeroupper; {$ELSE}db $C5,$F8,$77;{$ENDIF} +end; + +procedure AVX512MulAdd( x : PSingle; y : PSingle; N : integer; const fact : single); {$IFDEF FPC} assembler; {$ELSE} register; {$ENDIF} +// eax = x, edx = y, ecx = N +var r : Array[0..15] of single; +asm + {$IFDEF UNIX} + // Linux uses a diffrent ABI -> copy over the registers so they meet with winABI + // The parameters are passed in the following order: + // RDI, RSI, RDX, RCX, r8, r9 -> mov to RCX, RDX, R8, r9 + mov r8, rdx; + mov rdx, rsi; + mov rcx, rdi; + {$ENDIF} + + // iters + imul r8, -4; + + // helper registers for the mt1, mt2 and dest pointers + sub rcx, r8; + sub rdx, r8; + + // broadcast factor to zmm0 + {$IFDEF AVXSUP}vbroadcastss zmm0, xmm3; {$ELSE}db $62,$F2,$7D,$48,$18,$C3;{$ENDIF} + + // unrolled loop + @Loop1: + add r8, 256; + jg @loopEnd1; + + {$IFDEF AVXSUP}vmovupd zmm1, [rcx + r8 - 256]; {$ELSE}db $62,$B1,$FD,$48,$10,$4C,$01,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [rdx + r8 - 256]; {$ELSE}db $62,$B1,$FD,$48,$10,$54,$02,$FC;{$ENDIF} + {$IFDEF AVXSUP}vfmadd231ps zmm1, zmm2, zmm0; {$ELSE}db $62,$F2,$6D,$48,$B8,$C8;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 256], zmm1; {$ELSE}db 
$62,$B1,$FD,$48,$11,$4C,$01,$FC;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd zmm1, [rcx + r8 - 192]; {$ELSE}db $62,$B1,$FD,$48,$10,$4C,$01,$FD;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [rdx + r8 - 192]; {$ELSE}db $62,$B1,$FD,$48,$10,$54,$02,$FD;{$ENDIF} + + {$IFDEF AVXSUP}vfmadd231ps zmm1, zmm2, zmm0; {$ELSE}db $62,$F2,$6D,$48,$B8,$C8;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 192], zmm1; {$ELSE}db $62,$B1,$FD,$48,$11,$4C,$01,$FD;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd zmm1, [rcx + r8 - 128]; {$ELSE}db $62,$B1,$FD,$48,$10,$4C,$01,$FE;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [rdx + r8 - 128]; {$ELSE}db $62,$B1,$FD,$48,$10,$54,$02,$FE;{$ENDIF} + + {$IFDEF AVXSUP}vfmadd231ps zmm1, zmm2, zmm0; {$ELSE}db $62,$F2,$6D,$48,$B8,$C8;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 128], zmm1; {$ELSE}db $62,$B1,$FD,$48,$11,$4C,$01,$FE;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd zmm1, [rcx + r8 - 64]; {$ELSE}db $62,$B1,$FD,$48,$10,$4C,$01,$FF;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [rdx + r8 - 64]; {$ELSE}db $62,$B1,$FD,$48,$10,$54,$02,$FF;{$ENDIF} + + {$IFDEF AVXSUP}vfmadd231ps zmm1, zmm2, zmm0; {$ELSE}db $62,$F2,$6D,$48,$B8,$C8;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 64], zmm1; {$ELSE}db $62,$B1,$FD,$48,$11,$4C,$01,$FF;{$ENDIF} + jmp @Loop1; + + @loopEnd1: + + sub r8, 256; + jz @exitProc; + + @Loop2: + add r8, 64; + jg @loopEnd2; + + {$IFDEF AVXSUP}vmovupd zmm1, [rcx + r8 - 64]; {$ELSE}db $62,$B1,$FD,$48,$10,$4C,$01,$FF;{$ENDIF} + {$IFDEF AVXSUP}vmovupd zmm2, [rdx + r8 - 64]; {$ELSE}db $62,$B1,$FD,$48,$10,$54,$02,$FF;{$ENDIF} + + {$IFDEF AVXSUP}vfmadd231ps zmm1, zmm2, zmm0; {$ELSE}db $62,$F2,$6D,$48,$B8,$C8;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 64], zmm1; {$ELSE}db $62,$B1,$FD,$48,$11,$4C,$01,$FF;{$ENDIF} + jmp @loop2; + + @loopEnd2: + + sub r8, 64; + jz @exitProc; + + // loop to get all fitting into an array of 4 + @Loop3: + add r8, 16; + jg @Loop3End; + + {$IFDEF AVXSUP}vmovupd xmm1, [rcx + r8 - 16]; {$ELSE}db $C4,$A1,$79,$10,$4C,$01,$F0;{$ENDIF} + {$IFDEF AVXSUP}vmovupd 
xmm2, [rdx + r8 - 16]; {$ELSE}db $C4,$A1,$79,$10,$54,$02,$F0;{$ENDIF} + + {$IFDEF AVXSUP}vmulps xmm2, xmm2, xmm0; {$ELSE}db $C5,$E8,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddps xmm1, xmm1, xmm2; {$ELSE}db $C5,$F0,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovupd [rcx + r8 - 16], xmm1; {$ELSE}db $C4,$A1,$79,$11,$4C,$01,$F0;{$ENDIF} + jmp @Loop3; + @Loop3End: + + // handle last elements + sub r8, 16; + jz @exitProc; + + @loop4: + add r8, 4; + jg @loop4End; + + {$IFDEF AVXSUP}vmovss xmm1, [rcx + r8 - 4]; {$ELSE}db $C4,$A1,$7A,$10,$4C,$01,$FC;{$ENDIF} + {$IFDEF AVXSUP}vmovss xmm2, [rdx + r8 - 4]; {$ELSE}db $C4,$A1,$7A,$10,$54,$02,$FC;{$ENDIF} + + {$IFDEF AVXSUP}vmulss xmm2, xmm2, xmm0; {$ELSE}db $C5,$EA,$59,$D0;{$ENDIF} + {$IFDEF AVXSUP}vaddss xmm1, xmm1, xmm2; {$ELSE}db $C5,$F2,$58,$CA;{$ENDIF} + + {$IFDEF AVXSUP}vmovss [rcx + r8 - 4], xmm1; {$ELSE}db $C4,$A1,$7A,$11,$4C,$01,$FC;{$ENDIF} + + jmp @loop4; + @loop4End: + + @exitProc: + {$IFDEF AVXSUP}vzeroupper; {$ELSE}db $C5,$F8,$77;{$ENDIF} +end; + +{$ENDIF} + +end. 
diff --git a/neural/neuralavxcore.pas b/neural/neuralavxcore.pas new file mode 100644 index 00000000..fba5ae5e --- /dev/null +++ b/neural/neuralavxcore.pas @@ -0,0 +1,155 @@ +unit neuralavxcore; + +interface + +{$IFDEF CPUX64} +{$DEFINE x64} +{$ENDIF} +{$IFDEF cpux86_64} +{$DEFINE x64} +{$ENDIF} + +// ################################################# +// #### Exported AVX functions that use the "best" AVX cpu extension available + +function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; inline; +procedure AVXMulAdd( x : PSingle; y : PSingle; N : integer; const fact : single ); inline; + +procedure TestAVX; + +implementation + +uses SysUtils, CPUFeatures, Math, {$IFDEF x64} NeuralAVXx64 {$ELSE} NeuralAVX {$ENDIF}; + +function AVXDotProd( x : PSingle; y : PSingle; N : integer ) : single; +begin + if IsAVX512Present and (N >=32) + then + Result := AVX512DotProd(x, y, N) + else + Result := AVX2DotProd(x, y, N); +end; + +procedure AVXMulAdd( x : PSingle; y : PSingle; N : integer; const fact : single ); +begin + if IsAVX512Present and (N >=32) + then + AVX512MulAdd(x, y, N, fact) + else + AVX2MulAdd(x, y, N, fact); +end; + +type + TFloatArr = Array[0..127] of single; + PFloatArr = ^TFloatArr; + +procedure TestAVX; + function DotProd(PtrA, PtrB: PFloatArr; NumElements: integer) : single; + var i: Integer; + x : double; + begin + x := 0; + + for i := 0 to NumElements - 1 do + x := x + ptrA^[i]*ptrB^[i]; + + Result := x; + end; + + procedure MulAdd( PtrA, PtrB: PFloatArr; NumElements: integer; fact : single ); + var i : integer; + begin + for i := 0 to NumElements - 1 do + ptra^[i] := ptra^[i] + fact*PtrB^[i]; + end; +var a, b : Array[0..127] of single; + i: Integer; + c1, c2 : Array[0..127] of single; + j: Integer; + r1, r2 : single; +begin + // test the avx implementation for convolution and muldadd + for i := 0 to High(a) do + begin + a[i] := i/10; + b[i] := (i + 0.1)/5; + end; + + Write(' AVX Dot Prod'); + for i := 1 to Length(a) do + begin + r1 := 
DotProd( @a[0], @b[0], i); + r2 := AVX2DotProd( @a[0], @b[0], i ); + if not SameValue( r1, r2, 1e-3) then + begin + Writeln; + Writeln('Dot product failed @ index ' + IntToStr(i) ); + exit; + end; + end; + Writeln('... passed'); + + Write(' AVX MulAdd'); + for i := 1 to Length(a) do + begin + Move( a[0], c1[0], sizeof(c1)); + Move( a[0], c2[0], sizeof(c2)); + + MulAdd( @c1[0], @b[0], i, 0.2 ); + AVX2MulAdd( @c2[0], @b[0], i, 0.2); + + for j := 0 to i - 1 do + begin + if not SameValue( c1[j], c2[j], 1e-5) then + begin + Writeln; + Writeln('MulAdd failed @ index ' + IntToStr(i) + ',' + IntToStr(j) ); + exit; + end; + end; + end; + Writeln('... passed'); + + if IsAVX512Present then + begin + Write(' AVX512 Dot Prod'); + for i := 1 to Length(a) do + begin + r1 := DotProd( @a[0], @b[0], i); + r2 := AVX512DotProd( @a[0], @b[0], i ); + if not SameValue( r1, r2, 1e-3) then + begin + Writeln; + Writeln('512bit Dot product failed @ index ' + IntToStr(i) ); + exit; + end; + end; + Writeln('... passed'); + + Write(' AVX512 MulAdd'); + for i := 1 to Length(a) do + begin + Move( a[0], c1[0], sizeof(c1)); + Move( a[0], c2[0], sizeof(c2)); + + MulAdd( @c1[0], @b[0], i, 0.2 ); + AVX512MulAdd( @c2[0], @b[0], i, 0.2); + + for j := 0 to i - 1 do + begin + if not SameValue( c1[j], c2[j], 1e-4) then + begin + Writeln; + Writeln('MulAdd failed @ index ' + IntToStr(i) + ',' + IntToStr(j), ' ', c1[j], '/', c2[j] ); + exit; + end; + end; + end; + Writeln('... passed'); + end + else + Writeln('AVX 512 test skipped - no CPU support detected'); +end; + +end. 
+ diff --git a/neural/neuraldatasets.pas b/neural/neuraldatasets.pas index b015a4f0..1fd76735 100644 --- a/neural/neuraldatasets.pas +++ b/neural/neuraldatasets.pas @@ -747,16 +747,15 @@ function TClassesAndElements.FileCountAtClassId(ClassId: integer): integer; end; {$IFDEF FPC} -function TFileNameList.ThreadSafeLoadImageFromFileIntoVolume( - ImageFileName: string; V: TNNetVolume): boolean; +procedure TFileNameList.ThreadSafeLoadImageFromFileIntoVolume(ImageFileName:string; V:TNNetVolume); var M: TFPMemoryImage; begin M := TFPMemoryImage.Create(1, 1); {$IFDEF HASTHREADS}EnterCriticalSection(FCritSecLoad);{$ENDIF} - Result := M.LoadFromFile( ImageFileName ); + M.LoadFromFile( ImageFileName ); {$IFDEF HASTHREADS}LeaveCriticalSection(FCritSecLoad);{$ENDIF} - if Result then LoadImageIntoVolume(M, V); + LoadImageIntoVolume(M, V); M.Free; end; diff --git a/neural/neuralfit.pas b/neural/neuralfit.pas index 06d6c039..b4b89b60 100644 --- a/neural/neuralfit.pas +++ b/neural/neuralfit.pas @@ -311,7 +311,7 @@ TNeuralImageLoadingFit = class(TNeuralDataLoadingFit) procedure GetValidationProc(Idx: integer; ThreadId: integer; pInput, pOutput: TNNetVolume); procedure GetTestProc(Idx: integer; ThreadId: integer; pInput, pOutput: TNNetVolume); public - constructor Create(); override; + constructor Create(); destructor Destroy(); override; procedure FitLoading(pNN: TNNet; diff --git a/neural/neuralopencl.pas b/neural/neuralopencl.pas index 2d252c64..6902a6b9 100644 --- a/neural/neuralopencl.pas +++ b/neural/neuralopencl.pas @@ -560,10 +560,11 @@ procedure TDotProductSharedKernel.FinishAndLoadResult(Results: TNNetVolume; end; procedure TNeuralKernel.Prepare; -var resStream : TResourceStream; +{$IFNDEF FPC} var resStream : TResourceStream; {$ENDIF} begin // ########################################### // #### Check if the neural.cl file is part of the resources + {$IFNDEF FPC} try resStream := TResourceStream.Create(hInstance, 'NeuralCL', RT_RCDATA); 
FOpenCLProgramSource.LoadFromStream(resStream, TEncoding.UTF8); @@ -575,6 +576,7 @@ procedure TNeuralKernel.Prepare; except MessageProc('Resource NeuralCL not found - try to open file...'); end; + {$ENDIF} // Create the OpenCL Kernel Here: if FileExists('../../../neural/neural.cl') then @@ -897,6 +899,11 @@ function TEasyOpenCLV.CreateOutputSetArgument(V: TNNetVolume; clSetKernelArg(kernel, arg_index, sizeof(cl_mem), @Result); end; +function GetString( err : integer ) : string; +begin + Result := 'ERROR'; +end; + { TEasyOpenCL } procedure TEasyOpenCL.LoadPlatforms(); var diff --git a/neural/neuralopenclv.pas b/neural/neuralopenclv.pas index 056c48d4..80c9a277 100644 --- a/neural/neuralopenclv.pas +++ b/neural/neuralopenclv.pas @@ -24,7 +24,7 @@ interface uses - Classes, SysUtils, neuralopencl, {$IF CompilerVersion >= 23} VCL.StdCtrls {$ELSE} StdCtrls {$IFEND}; + Classes, SysUtils, neuralopencl,{$IFDEF FPC} StdCtrls {$ELSE} {$IF CompilerVersion >= 23} VCL.StdCtrls {$ELSE} StdCtrls {$IFEND} {$ENDIF}; type { TEasyOpenCLCL } diff --git a/neural/neuralthread.pas b/neural/neuralthread.pas index a5a857f8..3cf0028c 100644 --- a/neural/neuralthread.pas +++ b/neural/neuralthread.pas @@ -174,6 +174,7 @@ procedure CreateNeuralThreadListIfRequired(); end; end; +{$IFNDEF FPC} {$IF CompilerVersion <= 23} // delphi 2010 does not define the following functions and constants {$IFDEF MSWINDOWS} @@ -205,6 +206,37 @@ function GetThreadGroupAffinity(hThread: THandle; var GroupAffinity: TGroupAffin function GetActiveProcessorGroupCount: WORD; stdcall; external kernel32 name 'GetThreadGroupAffinity'; {$ENDIF} {$IFEND} +{$ELSE} +{$IFDEF MSWINDOWS} +const ALL_PROCESSOR_GROUPS = $ffff; + + // + // Structure to represent a group-specific affinity, such as that of a + // thread. Specifies the group number and the affinity within that group. 
+ // +type + KAFFINITY = ULONG_PTR; + _GROUP_AFFINITY = record + Mask: KAFFINITY; + Group: WORD; + Reserved: array[0..2] of WORD; + end; + {$EXTERNALSYM _GROUP_AFFINITY} + GROUP_AFFINITY = _GROUP_AFFINITY; + {$EXTERNALSYM GROUP_AFFINITY} + PGROUP_AFFINITY = ^_GROUP_AFFINITY; + {$EXTERNALSYM PGROUP_AFFINITY} + TGroupAffinity = _GROUP_AFFINITY; + PGroupAffinity = PGROUP_AFFINITY; + +function GetActiveProcessorCount(GroupNumber: WORD): DWORD; stdcall; external 'kernel32.dll'; +function SetThreadGroupAffinity(hThread: THandle; var GroupAffinity: TGroupAffinity; + PreviousGroupAffinity: PGroupAffinity): ByteBool; stdcall; external kernel32 name 'GetThreadGroupAffinity'; +function GetThreadGroupAffinity(hThread: THandle; var GroupAffinity: TGroupAffinity): ByteBool; stdcall; external kernel32 name 'GetThreadGroupAffinity'; +function GetActiveProcessorGroupCount: WORD; stdcall; external kernel32 name 'GetThreadGroupAffinity'; +{$ENDIF} + +{$ENDIF} function NeuralDefaultThreadCount: integer; begin diff --git a/neural/neuralvolume.pas b/neural/neuralvolume.pas index f8f16b32..3e9ed3d4 100644 --- a/neural/neuralvolume.pas +++ b/neural/neuralvolume.pas @@ -89,7 +89,9 @@ interface { TVolume } {$IFDEF FPC} TIntegerList = class (specialize TFPGList<integer>); - generic TVolume<T> = class(TObject) + T = TNeuralFloat; + //generic TVolume<T> = class(TObject) // why anyway? 
+ TVolume = class(TObject) {$ELSE} TIntegerList = TList<Integer>; T = TNeuralFloat; @@ -111,13 +113,14 @@ TVolume = class(TObject) FSizeY: integer; FDepth: integer; FTag: array[0..1] of integer; + FLastPos: integer; function GetTag: integer; {$IFDEF Release} inline; {$ENDIF} procedure SetTag(I: integer); {$IFDEF Release} inline; {$ENDIF} function GetTags(x: integer): integer; {$IFDEF Release} inline; {$ENDIF} procedure SetTags(x: integer; AValue: integer); {$IFDEF Release} inline; {$ENDIF} class procedure MulAddPPVS(PtrA, PtrB: TNeuralFloatArrPtr; Value: T; - pSize: integer); {$IFDEF Release} inline; {$ENDIF} + pSize: integer); {$IFDEF Release} {$IFDEF FPC} inline; {$ENDIF} {$ENDIF} public // FData was made public to allow other fast operations FData: array of T; @@ -198,11 +201,11 @@ TVolume = class(TObject) procedure Copy(var Original: array of T); overload; procedure Copy(var Original: array of byte); overload; procedure Copy(Original: TBits; pFlase: T = -0.5; pTrue: T = +0.5); overload; - procedure CopyPadding(Original: TVolume; Padding: integer); {$IFDEF Release} inline; {$ENDIF} overload; - procedure CopyPadding(Original: TVolume; PaddingX, PaddingY: integer); {$IFDEF Release} inline; {$ENDIF} overload; + procedure CopyPadding(Original: TVolume; Padding: integer); overload; {$IFDEF Release} inline; {$ENDIF} + procedure CopyPadding(Original: TVolume; PaddingX, PaddingY: integer); overload; {$IFDEF Release} inline; {$ENDIF} procedure CopyCropping(Original: TVolume; StartX, StartY, pSizeX, pSizeY: integer); procedure CopyResizing(Original: TVolume; NewSizeX, NewSizeY: integer); - procedure CopyNoChecks(Original: TVolume); {$IFDEF Release} inline; {$ENDIF} overload; + procedure CopyNoChecks(Original: TVolume); overload; {$IFDEF Release} inline; {$ENDIF} procedure CopyNoChecks(var Original: array of byte); overload; procedure CopyNoChecksIntArr(var Original: array of integer); overload; procedure CopyReversedNoChecksIntArr(var Original: array of integer); 
overload; @@ -215,7 +218,7 @@ TVolume = class(TObject) procedure CopyTransposingAs2D(Original: TVolume); procedure Define(Original: array of T); function DotProduct(Original: TVolume): T; overload; {$IFDEF Release} inline; {$ENDIF} - class function DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: integer): Single; overload; {$IFDEF Release} inline; {$ENDIF} + class function DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: integer): Single; overload; class function Product(PtrA: TNeuralFloatArrPtr; NumElements: integer): Single; overload; {$IFDEF Release} inline; {$ENDIF} function SumDiff(Original: TVolume): T; {$IFDEF Release} inline; {$ENDIF} procedure DebugDiff(Original: TVolume; Limit: Single = 0); @@ -336,6 +339,7 @@ TVolume = class(TObject) property SizeX: integer read FSizeX; property SizeY: integer read FSizeY; property Depth: integer read FDepth; + end; TNNetToken = record @@ -346,11 +350,11 @@ TNNetToken = record TNNetTokenArray = array of TNNetToken; { TNNetVolume } - {$IFDEF FPC} - TNNetVolume = class (specialize TVolume<TNeuralFloat>) - {$ELSE} + {.$IFDEF FPC} + // TNNetVolume = class (specialize TVolume<TNeuralFloat>) + {.$ELSE} TNNetVolume = class (TVolume) - {$ENDIF} + {.$ENDIF} private FDataPtr: TNeuralFloatArrPtr; public @@ -841,8 +845,8 @@ implementation {$DEFINE x64} {$ENDIF} -uses {$IFNDEF x64} Neural.AVX {$ELSE} Neural.AVXx64{$ENDIF}, neuralbit, Math, strutils, - CPUFeatures; +uses {$IFNDEF FPC} neuralavxcore, {$IFDEF x64} NeuralAVXx64, {$ELSE} NeuralAVX, {$ENDIF} {$ENDIF}neuralbit, Math, + CPUFeatures, strutils; var locDataFmtSet : TFormatSettings; @@ -1767,6 +1771,18 @@ function RectifiedLinearUnitDerivative(x: TNeuralFloat): TNeuralFloat; end; // paper: GAUSSIAN ERROR LINEAR UNITS (GELUS) Gimpel et al. 
2018 + +// https://www.musicdsp.org/en/latest/Other/178-reasonably-accurate-fastish-tanh-approximation.html + +function Tanh_fast(x : Single) : Single; inline; +var a,b:Single; +begin + x := x*2; + a := abs(x); + b := (6+a*(3+a)); + Result := (x*b)/(a*b+12); +end; + function GaussErrorLinUnit(x : TNeuralFloat) : TNeuralFloat; const cSqrt_2_pi = 0.797884560803; begin @@ -1778,7 +1794,8 @@ function GaussErrorLinUnit(x : TNeuralFloat) : TNeuralFloat; then Result := 0 else - Result := 0.5*x*(1 + tanh( cSqrt_2_pi*( x + 0.044715*x*x*x))); + // Result := 0.5*x*(1 + tanh( cSqrt_2_pi*( x + 0.044715*x*x*x))); + Result := 0.5*x*(1 + 2*Tanh_fast( cSqrt_2_pi*( x + 0.044715*x*x*x))); end; function GaussErrorLinUnitDerivative(x : TNeuralFloat) : TNeuralFloat; @@ -4191,61 +4208,6 @@ procedure TVolume.MulAdd(Original1, Original2: TVolume); MulAdd(Addr(Self.FData[0]), Addr(Original1.FData[0]), Addr(Original2.FData[0]), Self.Size); end; -class procedure TVolume.MulAddPPVS(PtrA, PtrB: TNeuralFloatArrPtr; Value: T; - pSize: integer); -var - I: integer; - vHigh: integer; - BasePos: integer; - {$IFDEF FPC} - AddrA, AddrB: TNeuralFloatPtr; - {$ENDIF} -begin - BasePos := 0; - vHigh := pSize - 1; - - {$IFDEF FPC} - AddrA := pointer(PtrA); - AddrB := pointer(PtrB); - while BasePos <= vHigh - 7 do - begin - (AddrA)^ := (AddrA)^ + (AddrB)^ * Value; - (AddrA+1)^ := (AddrA+1)^ + (AddrB+1)^ * Value; - (AddrA+2)^ := (AddrA+2)^ + (AddrB+2)^ * Value; - (AddrA+3)^ := (AddrA+3)^ + (AddrB+3)^ * Value; - (AddrA+4)^ := (AddrA+4)^ + (AddrB+4)^ * Value; - (AddrA+5)^ := (AddrA+5)^ + (AddrB+5)^ * Value; - (AddrA+6)^ := (AddrA+6)^ + (AddrB+6)^ * Value; - (AddrA+7)^ := (AddrA+7)^ + (AddrB+7)^ * Value; - BasePos := BasePos + 8; - AddrA := AddrA + 8; - AddrB := AddrB + 8; - end; - - while BasePos <= vHigh - 3 do - begin - (AddrA)^ := (AddrA)^ + (AddrB)^ * Value; - (AddrA+1)^ := (AddrA+1)^ + (AddrB+1)^ * Value; - (AddrA+2)^ := (AddrA+2)^ + (AddrB+2)^ * Value; - (AddrA+3)^ := (AddrA+3)^ + (AddrB+3)^ * Value; - 
BasePos := BasePos + 4; - AddrA := AddrA + 4; - AddrB := AddrB + 4; - end; - {$ENDIF} - - if BasePos <= vHigh then for I := BasePos to vHigh do - begin - //Write(PtrA^[I],' ', PtrB^[I],' ', Value,'->'); - {$IFDEF FPC} - PtrA^[I] += PtrB^[I]*Value; - {$ELSE} - PtrA^[I] := PtrA^[I] + PtrB^[I]*Value; - {$ENDIF} - //WriteLn(PtrA^[I]); - end; -end; - class procedure TVolume.MulMulAdd(PtrA, PtrB: TNeuralFloatArrPtr; Value1, Value2: T; pSize: integer); var @@ -11077,6 +11039,68 @@ class function TVolume.DotProduct(PtrA, PtrB: TNeuralFloatArrPtr; NumElements: i {$ENDIF} end; +class procedure TVolume.MulAddPPVS(PtrA, PtrB: TNeuralFloatArrPtr; Value: T; + pSize: integer); +var + I: integer; + vHigh: integer; + BasePos: integer; + {$IFDEF FPC} + AddrA, AddrB: TNeuralFloatPtr; + {$ENDIF} +begin + {$IFNDEF FPC} + if locAVX and (pSize >= 4 ) then + begin + AVXMulAdd(PSingle(PtrA), PSingle(PtrB), pSize, Value); + exit; + end; + {$ENDIF} + BasePos := 0; + vHigh := pSize - 1; + + {$IFDEF FPC} + AddrA := pointer(PtrA); + AddrB := pointer(PtrB); + while BasePos <= vHigh - 7 do + begin + (AddrA)^ := (AddrA)^ + (AddrB)^ * Value; + (AddrA+1)^ := (AddrA+1)^ + (AddrB+1)^ * Value; + (AddrA+2)^ := (AddrA+2)^ + (AddrB+2)^ * Value; + (AddrA+3)^ := (AddrA+3)^ + (AddrB+3)^ * Value; + (AddrA+4)^ := (AddrA+4)^ + (AddrB+4)^ * Value; + (AddrA+5)^ := (AddrA+5)^ + (AddrB+5)^ * Value; + (AddrA+6)^ := (AddrA+6)^ + (AddrB+6)^ * Value; + (AddrA+7)^ := (AddrA+7)^ + (AddrB+7)^ * Value; + BasePos := BasePos + 8; + AddrA := AddrA + 8; + AddrB := AddrB + 8; + end; + + while BasePos <= vHigh - 3 do + begin + (AddrA)^ := (AddrA)^ + (AddrB)^ * Value; + (AddrA+1)^ := (AddrA+1)^ + (AddrB+1)^ * Value; + (AddrA+2)^ := (AddrA+2)^ + (AddrB+2)^ * Value; + (AddrA+3)^ := (AddrA+3)^ + (AddrB+3)^ * Value; + BasePos := BasePos + 4; + AddrA := AddrA + 4; + AddrB := AddrB + 4; + end; + {$ENDIF} + + if BasePos <= vHigh then for I := BasePos to vHigh do + begin + //Write(PtrA^[I],' ', PtrB^[I],' ', Value,'->'); + {$IFDEF FPC} + 
PtrA^[I] += PtrB^[I]*Value; + {$ELSE} + PtrA^[I] := PtrA^[I] + PtrB^[I]*Value; + {$ENDIF} + //WriteLn(PtrA^[I]); + end; +end; + class function TVolume.Product(PtrA: TNeuralFloatArrPtr; NumElements: integer): Single; var @@ -11122,7 +11146,6 @@ procedure TNNetVolumePairList.SetItem(Index: Integer; AObject: TNNetVolumePair); end; {$ENDIF} - // ########################################### // #### Initialize cpu set variables // ########################################### @@ -11133,12 +11156,12 @@ initialization locAVX512 := IsAVX512Present; {$IF DEFINED(FPC)} - locFmtSet := DefaultFormatSettings; + locDataFmtSet := DefaultFormatSettings; {$ELSE} {$IF (CompilerVersion <= 21)} GetLocaleFormatSettings(0, locDataFmtSet); {$ELSE} - locFmtSet := TFormatSettings.Create; + locDataFmtSet := TFormatSettings.Create; {$IFEND} {$IFEND} locDataFmtSet.DecimalSeparator := '.';