Cuiqing Li (李崔卿) tiandiao123

  • Shanghai, China
#include <future>
#include <iostream>
using namespace std;
// Compute N! recursively; the recursive step and the async driver are assumed,
// since the gist preview cuts off after the base case.
int factorial(int N){
    if(N <= 1){
        return 1;
    }
    return N * factorial(N - 1);
}
int main(){
    std::future<int> f = std::async(std::launch::async, factorial, 4);
    cout << f.get() << endl;  // prints 24
}
#include <iostream>
#include <thread>
#include <future>
#include <deque>
#include <mutex>               // needed for std::mutex
#include <condition_variable>  // needed for std::condition_variable
using namespace std;
// Shared state for a packaged_task worker queue: producers push
// packaged_task<int()> jobs, a worker thread pops and runs them.
std::deque<std::packaged_task<int()>> task_q;
std::mutex mu;                 // guards task_q
std::condition_variable cond;  // wakes the worker when a task arrives
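// The preview stops at the shared globals above; everything below is my
// assumption about how the gist continues: a worker thread that pops one
// packaged_task from task_q, and a main() that pushes a task and reads its
// result through the task's future.
int job(){ return 42; }

void worker(){
    std::packaged_task<int()> t;
    {
        std::unique_lock<std::mutex> lock(mu);
        cond.wait(lock, []{ return !task_q.empty(); });  // wait for a task
        t = std::move(task_q.front());
        task_q.pop_front();
    }
    t();  // run the task; its future becomes ready
}

int main(){
    std::thread th(worker);
    std::packaged_task<int()> task(job);
    std::future<int> f = task.get_future();
    {
        std::lock_guard<std::mutex> lock(mu);
        task_q.push_back(std::move(task));
    }
    cond.notify_one();
    std::cout << f.get() << std::endl;  // prints 42
    th.join();
}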
#include <string>
#include <iostream>
#include <future>
#include <thread>
#include <chrono>   // for the 3s duration literal
using namespace std;
// Worker: sleep, then fulfil the promise so the paired future gets a value.
// The set_value call and its string are assumed; the preview cuts off here.
void TestFuture(promise<string> p){
    this_thread::sleep_for(3s);
    cout << "begin setting value ... " << endl;
    p.set_value("value set inside TestFuture");
}
#include <iostream>
#include <thread>
#include <mutex>
using namespace std;

void SystemInit(){
    cout << "hello world! " << endl;
}
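// The <mutex> include suggests this gist demonstrates std::call_once; the
// sketch below is my assumption about the missing part: several threads all
// request initialization, but SystemInit runs exactly once.
std::once_flag init_flag;

void Worker(){
    std::call_once(init_flag, SystemInit);  // only the first caller runs it
}

int main(){
    std::thread t1(Worker), t2(Worker), t3(Worker);
    t1.join(); t2.join(); t3.join();        // "hello world!" prints once
    return 0;
}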
#include <thread>
#include <iostream>
#include <string>
using namespace std;
class MyThread
{
public:
    void Main(){
        cout << "my thread main function " << name << " : " << age << endl;
    }
    string name = "my_thread";  // members referenced in Main(); values assumed,
    int age = 30;               // since the preview cuts off before their declarations
};
int main(){
    MyThread obj;
    thread t(&MyThread::Main, &obj);  // run the member function on a new thread
    t.join();
}
# modified code from https://www.bilibili.com/video/BV1oa411b7c9/?spm_id_from=333.999.0.0&vd_source=0e3bf657889554538e4fce27455e8e66
import asyncio

async def print_info(waiting_time, info):
    # sleep without blocking the event loop, then report what ran
    await asyncio.sleep(waiting_time)
    return "{} --- {}".format(waiting_time, info)

# assumed driver (the preview stops here): run two coroutines concurrently
async def main():
    print(await asyncio.gather(print_info(1, "first task"), print_info(2, "second task")))

asyncio.run(main())
# the following code is modified from https://www.bilibili.com/video/BV155411V7tj/?spm_id_from=333.337.search-card.all.click&vd_source=0e3bf657889554538e4fce27455e8e66
import asyncio

class GatherApples:
    def __init__(self):
        self.apples = [1]

    async def ask_for_apple(self):
        # assumed completion (the preview cuts off here): simulate a slow
        # fetch, then add another apple to the basket
        await asyncio.sleep(1)
        self.apples.append(len(self.apples) + 1)
        return self.apples
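# Assumed usage of the class above (not in the preview): ask for several
# apples concurrently with asyncio.gather and print the shared basket.
async def main():
    basket = GatherApples()
    await asyncio.gather(basket.ask_for_apple(),
                         basket.ask_for_apple(),
                         basket.ask_for_apple())
    print(basket.apples)   # [1, 2, 3, 4]

asyncio.run(main())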
import tensorflow as tf

saved_model_dir = "/Users/cuiqingli123/Workspace/torch_op_exp/mobile_net_v3_small"

# Convert the model (dynamic-range quantization: weights only, no calibration data)
converter = tf.lite.TFLiteConverter.from_saved_model(saved_model_dir)  # path to the SavedModel directory
converter.optimizations = [tf.lite.Optimize.DEFAULT]
tflite_model = converter.convert()
import tensorflow as tf
import numpy as np

def representative_dataset():
    # Calibration data for quantization: 100 random batches shaped like the
    # model input (1, 224, 224, 3).
    for _ in range(100):
        data = np.random.rand(1, 224, 224, 3)
        yield [data.astype(np.float32)]

saved_model_dir = "/Users/cuiqingli123/Workspace/torch_op_exp/mobile_net_v3_small"
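# The preview ends at saved_model_dir; the representative_dataset function
# above is only useful once it is handed to the converter, so the continuation
# below is my assumption: full-integer (int8) post-training quantization.
converter = tf.lite.TFLiteConverter.from_saved_model(saved_model_dir)
converter.optimizations = [tf.lite.Optimize.DEFAULT]
converter.representative_dataset = representative_dataset
converter.target_spec.supported_ops = [tf.lite.OpsSet.TFLITE_BUILTINS_INT8]
converter.inference_input_type = tf.int8    # quantize the model's input
converter.inference_output_type = tf.int8   # quantize the model's output
tflite_model = converter.convert()

with open("mobile_net_v3_small_int8.tflite", "wb") as f:  # output name assumed
    f.write(tflite_model)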
import tensorflow as tf

saved_model_dir = "/Users/cuiqingli123/Workspace/torch_op_exp/mobile_net_v3_small"

# Convert the model
converter = tf.lite.TFLiteConverter.from_saved_model(saved_model_dir)  # path to the SavedModel directory
converter.optimizations = [tf.lite.Optimize.DEFAULT]
tflite_model = converter.convert()

# Save the model (the preview cuts off here; the file name is assumed).
with open("model.tflite", "wb") as f:
    f.write(tflite_model)
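# Optional check, not from the original gist: load the converted model with
# the TFLite Interpreter and run one random input through it (the .tflite
# path matches the file name assumed above).
import numpy as np

interpreter = tf.lite.Interpreter(model_path="model.tflite")
interpreter.allocate_tensors()
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

x = np.random.rand(*input_details[0]["shape"]).astype(np.float32)
interpreter.set_tensor(input_details[0]["index"], x)
interpreter.invoke()
print(interpreter.get_tensor(output_details[0]["index"]).shape)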