domingo, 12 de outubro de 2025

emu8086 - count

/* Main.c file generated by New Project wizard

 *

 * Created:   sáb out 11 2025

 * Processor: 8086

 * Compiler:  Digital Mars C

 *

 * Before starting simulation set Internal Memory Size 

 * in the 8086 model properties to 0x10000

 */



// Base I/O port of the emu8086 virtual robot:
//   r_port     = command register (written with out)
//   r_port + 1 = data register    (examination result, read with in)
//   r_port + 2 = status register  (bit0 = new data, bit1 = busy)
// (Register roles as used by the in/out accesses inside Robot().)
#define r_port 9

// Number of lamps this robot has switched on so far; incremented in
// switch_on_lamp and compared against 5 in the main loop (-> Happy state).
unsigned int count = 0;


/* Robot(): main control routine for the emu8086 virtual robot.
 *
 * Drives the robot through its I/O ports (r_port = command register,
 * r_port+1 = data register, r_port+2 = status register).  The robot
 * wanders randomly and switches on every switched-off lamp it finds;
 * `count` tracks how many lamps were turned on.  Once count reaches 5
 * the code enters the "Happy" loop (turn + step forever) and never
 * returns.
 *
 * NOTE(review): port 199 appears to be an external display device used
 * to show `count` -- confirm against the emu8086 peripheral setup.
 */
void Robot(void)

{

   asm {

//===================================

// show 0 on the external display (port 199) before starting:
mov ax,0
mov dx,199
out dx,ax

// main wander loop: runs until 5 lamps have been switched on.
eternal_loop:

// goal reached?  5 lamps on -> celebrate forever:
cmp count,5
je Happy

// wait until robot is ready to accept a command:
call wait_robot

// command 4 = examine the area in front of the robot:
mov al, 4
out r_port, al

call wait_exam

// read the examination result from the data register:
in al, r_port + 1

// 0 = nothing found?
cmp al, 0
je cont  // - yes, so continue.

// 255 = wall?
cmp al, 255
je cont  // - yes, so continue.

// 7 = switched-on lamp?
cmp al, 7
jne lamp_off  // - no, so skip.
// - yes: leave it alone (switch-off is deliberately disabled
//   here) and just turn:
//call switch_off_lamp
jmp  cont  // continue

lamp_off: nop

// if it gets here the object must be a switched-off lamp,
// because all other cases were checked above:
call switch_on_lamp

cont:
call random_turn

call wait_robot

// command 1 = try to step forward:
mov al, 1
out r_port, al

call wait_robot

// try to step forward again:
mov al, 1
out r_port, al

jmp eternal_loop // go again!

//===================================

// wait_robot: does not return until the robot is ready to
// receive the next command (status bit 1 == 0).
// Clobbers: al, flags.
wait_robot:
busy: in al, r_port+2
      test al, 0b00000010
      jnz busy // busy, so wait.
ret

//===================================

// wait_exam: does not return until the robot completes the
// examination (status bit 0 == 1 -> new data available).
// Clobbers: al, flags.
wait_exam:
busy2: in al, r_port+2
       test al, 0b00000001
       jz busy2 // no new data, so wait.
ret

//===================================

// switch_off_lamp: command 6 = switch off the lamp in front.
// Currently unused -- its only call site is commented out above.
switch_off_lamp:
mov al, 6
out r_port, al
ret

//===================================

// switch_on_lamp: command 5 = switch on the lamp in front, then
// re-examine and retry while the result is 8 or 255 (presumably
// "lamp still off" / "blocked" -- confirm against the emu8086
// robot docs).  On success increments `count` and writes it to
// the display at port 199.  Clobbers: ax, dx, flags.
switch_on_lamp:

syncronize: 
mov al, 5
out r_port, al
// wait until robot is ready:
call wait_robot
// command 4 = re-examine the area in front:
mov al, 4
out r_port, al

call wait_exam

// read result from the data register:
in al, r_port + 1

cmp al,8
je syncronize
cmp al,255
je syncronize

inc count

// show the new count on the external display (port 199):
mov ax,count
mov dx,199
out dx,ax

ret

//===================================

// random_turn: turns left, right, or not at all, pseudo-randomly,
// seeded from the BIOS system timer.  Clobbers: ax, cx, dx, flags.
random_turn:

// INT 1Ah / AH=0: get number of clock ticks since midnight in cx:dx
mov ah, 0
int 1ah

// mix the tick bits together with xor:
xor dh, dl
xor ch, cl
xor ch, dh

// bit 1 clear -> no turn this time:
test ch, 2
jz no_turn

// bit 0 chooses the direction:
test ch, 1
jnz turn_right

// command 2 = turn left:
mov al, 2
out r_port, al
// exit from procedure:
ret

turn_right:
// command 3 = turn right:
mov al, 3
out r_port, al

no_turn:
ret

//===================================

// Happy: terminal celebration state entered once count == 5 --
// spin right and step forward, forever (never returns).
Happy:
     mov al, 3
     out r_port, al
     call wait_robot
// try to step forward:
mov al, 1
out r_port, al

call wait_robot
     jmp Happy
   }

}



/* Program entry point: hand control to the robot driver, then idle.
 * Robot() never returns in normal operation; the trailing loop keeps
 * the CPU parked here should it ever fall through. */
void main(void)
{
   Robot();

   for (;;)
      ;  /* spin forever */
}

20BYJ46



STEP MOTOR 20BYJ46

VERMELHO (12V)

AZUL (D1)

ROSA (D2)

AMARELO (D3)

LARANJA (D4)






REF: STM32驱动小型4相步进电机(ULN2003+20BYJ46)-CSDN博客

28BYJ-48 Stepper Motor Pinout Wiring, Specifications, Uses Guide & Datasheet

domingo, 5 de outubro de 2025

PT TO CVIMODEL

    


Geração do .pt


Entre em

E nele, basicamente deves pegar o ZIP (yolov11) gerado no ROBOFLOW, descompactar, instalar o ULTRALYTICS (yolo)
%pip install "ultralytics<=8.3.40" supervision roboflow
# prevent ultralytics from tracking your activity
!yolo settings sync=False
import ultralytics
ultralytics.checks()
..após isto executar o Script
!yolo task=detect mode=train model=yolo11n.pt data=data.yaml epochs=200 imgsz=640



Salve o .pt gerado

Instale o TPU MLIR e ULTRALYTICS em seu PC

docker run --privileged --name recamera -v /workspace -it sophgo/tpuc_dev:v3.1 on /workspace sudo apt-get update sudo apt-get upgrade pip install tpu_mlir[all]==1.7 git clone https://github.com/sophgo/tpu-mlir.git cd tpu-mlir source ./envsetup.sh ./build.sh mkdir model_yolo11n && cd model_yolo11n cp -rf ${REGRESSION_PATH}/dataset/COCO2017 . cp -rf ${REGRESSION_PATH}/image . mkdir Workspace && cd Workspace pip install ultralytics (crie em /dataset/ o nome de uma pasta com as imagens treinadas) (dentro de /image/copie uma imagem das imagens treinadas) ### git clone https://github.com/Seeed-Studio/sscma-example-sg200x.git ### cd sscma-example-sg200x/scripts ### (copy into this folder the best.onnx) ### python export.py --output_names "/model.23/cv2.0/cv2.0.2/Conv_output_0,/model.23/cv3.0/cv3.0.2/Conv_output_0,/model.23/cv2.1/cv2.1.2/Conv_output_0,/model.23/cv3.1/cv3.1.2/Conv_output_0,/model.23/cv2.2/cv2.2.2/Conv_output_0,/model.23/cv3.2/cv3.2.2/Conv_output_0" --dataset ../../../../tpu-mlir/regression/dataset/BUGGIO --test_input ../../../../tpu-mlir/regression/image/Ades_2-4_jpg.rf.4de8403c125c5d16b435a839a3a93780.jpg best.onnx dentro do /workspace copie o best.pt (gerado no Colab) execute yolo export model=best.pt format=onnx imgsz=640,640 será gerado um best.onnx execute model_transform \ --model_name yolo11n \ --model_def best.onnx \ --input_shapes "[[1,3,640,640]]" \ --mean "0.0,0.0,0.0" \ --scale "0.0039216,0.0039216,0.0039216" \ --keep_aspect_ratio \ --pixel_format rgb \ --output_names "/model.23/cv2.0/cv2.0.2/Conv_output_0,/model.23/cv3.0/cv3.0.2/Conv_output_0,/model.23/cv2.1/cv2.1.2/Conv_output_0,/model.23/cv3.1/cv3.1.2/Conv_output_0,/model.23/cv2.2/cv2.2.2/Conv_output_0,/model.23/cv3.2/cv3.2.2/Conv_output_0" \ --test_input ../../../tpu-mlir/regression/image/Ades_2-4_jpg.rf.4de8403c125c5d16b435a839a3a93780.jpg \ --test_result yolo11n_top_outputs.npz \ --mlir yolo11n.mlir execute run_calibration \ yolo11n.mlir \ --dataset ../BUGGIO \ --input_num 100 \ -o 
yolo11n_calib_table execute model_deploy \ --mlir yolo11n.mlir \ --quantize INT8 \ --quant_input \ --processor cv181x \ --calibration_table yolo11n_calib_table \ --test_input ../../../tpu-mlir/regression/image/Ades_2-4_jpg.rf.4de8403c125c5d16b435a839a3a93780.jpg \ --test_reference yolo11n_top_outputs.npz \ --customization_format RGB_PACKED \ --fuse_preprocess \ --aligned_input \ --model yolo11n_1684x_int8_sym.cvimodel cvimodel se encontra em /workspace/tpu-mlir/model_yolo11n/Workspace \\wsl.localhost\docker-desktop\mnt\docker-desktop-disk\data\docker\volumes\8bd5aab9644ef6f95e9275c42306a33a7313a58d86c9a1a7ab20e4a24f5649aa\_data\tpu-mlir\model_yolo11n\Workspace ### o script do RECAMERA







REF:

Sobre a SMARTCORE

A SMARTCORE FORNECE CHIPS E MÓDULOS PARA IOT, COMUNICAÇÃO WIRELESS, BIOMETRIA, CONECTIVIDADE, RASTREAMENTO E AUTOMAÇÃO. NOSSO PORTFÓLIO INCLUI MODEM 2G/3G/4G/NB-IOT, SATELITAL, MÓDULOS WIFI, BLUETOOTH, GPS, SIGFOX, LORA, LEITOR DE CARTÃO, LEITOR QR CODE, MECANISMO DE IMPRESSÃO, MINI-BOARD PC, ANTENA, PIGTAIL, BATERIA, REPETIDOR GPS E SENSORES