/**
* Validate some data.
*
* @param string|array $fields
* @param string|array $rules
* @return bool
*/
function validate($fields, $rules)
{
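    // Sketch of one possible body (the original snippet ends at the opening
    // brace); the single 'required' rule handled here is illustrative only.
    $fields = (array) $fields;
    foreach ((array) $rules as $field => $rule) {
        if ($rule === 'required' && empty($fields[$field])) {
            return false;
        }
    }
    return true;
}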
<?php

class RetryTest extends TestCase
{
    public function setUp()
    {
        parent::setUp();

        // abuse superglobal to keep track of state
        $_GET['a'] = 0;
    }
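    // Sketch of one possible test (the class is cut off above); it assumes a
    // retry($times, $callback, $sleepMs) helper such as Laravel's.
    public function testRetriesUntilTheCallbackSucceeds()
    {
        $result = retry(3, function () {
            // fail on the first two attempts, succeed on the third
            if ($_GET['a']++ < 2) {
                throw new \RuntimeException('not yet');
            }
            return 'done';
        }, 0);

        $this->assertSame('done', $result);
        $this->assertSame(3, $_GET['a']);
    }
}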
<?php
/**
 * Paginate the items of an array or Collection.
 *
 * @param array|Collection $items
 * @param int $perPage
 * @param int $page
 * @param array $options
 *
 * @return LengthAwarePaginator
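 */
function paginate($items, $perPage = 15, $page = 1, array $options = [])
{
    // A common implementation (the original body is not part of the snippet
    // above): wrap the items for the requested page in a LengthAwarePaginator.
    $items = $items instanceof \Illuminate\Support\Collection
        ? $items
        : \Illuminate\Support\Collection::make($items);

    return new \Illuminate\Pagination\LengthAwarePaginator(
        $items->forPage($page, $perPage), $items->count(), $perPage, $page, $options
    );
}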
""" | |
Code adapted from and inspired by http://blog.ranman.org/cleaning-up-aws-with-boto3/. | |
""" | |
import os | |
import re | |
from datetime import datetime, timedelta | |
import boto3 | |
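# Sketch continuation (the original snippet stops after the imports): list the
# EBS snapshots owned by this account that are older than 30 days.
ec2 = boto3.client('ec2')
cutoff = datetime.utcnow() - timedelta(days=30)
for snap in ec2.describe_snapshots(OwnerIds=['self'])['Snapshots']:
    if snap['StartTime'].replace(tzinfo=None) < cutoff:
        print('cleanup candidate: %s' % snap['SnapshotId'])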
FROM php:5.6-cli

RUN apt-get update \
    && apt-get install -y \
        libfreetype6-dev \
    && rm -rf /var/lib/apt/lists/* \
    && docker-php-ext-install iconv \
    && apt-get remove -y \
        libfreetype6-dev \
    && apt-get install -y \
ssh-keygen -t rsa -b 4096 -m PEM -f jwtRS256.key
# Don't add a passphrase when prompted
openssl rsa -in jwtRS256.key -pubout -outform PEM -out jwtRS256.key.pub
cat jwtRS256.key
cat jwtRS256.key.pub
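# Optional sanity check (illustrative, not part of the key generation itself):
# the pair can sign and verify data
echo 'payload' > payload.txt
openssl dgst -sha256 -sign jwtRS256.key -out payload.sig payload.txt
openssl dgst -sha256 -verify jwtRS256.key.pub -signature payload.sig payload.txt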
<?php
# Fill in the variables below, then run from the CLI:
# $ php -f db-connect-test.php
$dbname = 'name';
$dbuser = 'user';
$dbpass = 'pass';
$dbhost = 'host';

$connect = mysql_connect($dbhost, $dbuser, $dbpass) or die("Unable to connect to '$dbhost'");
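// Note: the mysql_* extension was removed in PHP 7; on newer PHP the same
// check can be done with mysqli, e.g.:
//   $connect = mysqli_connect($dbhost, $dbuser, $dbpass, $dbname) or die("Unable to connect to '$dbhost'");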
.video-container {
    position: relative;
    padding-bottom: 56.25%; /* 16:9 aspect ratio */
    padding-top: 30px;
    height: 0;
    overflow: hidden;
}

.video-container iframe,
.video-container object,
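.video-container embed {
    /* standard completion of the rule (the original snippet is cut off above):
       stretch the embedded player to fill the box created by padding-bottom */
    position: absolute;
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
}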
Moved to git repository: https://github.com/denji/nginx-tuning
For this configuration you can use whichever web server you like; I chose nginx because it is the one I work with most.
Generally, a properly configured nginx can handle up to 400K to 500K requests per second (clustered). The most I have seen myself is 50K to 80K requests per second (non-clustered) at around 30% CPU load; granted, that was on 2 x Intel Xeon CPUs with Hyper-Threading enabled, but it works without problems on slower machines too.
Keep in mind that this config is used in a testing environment, not in production, so you will need to work out how to apply most of these settings in the way that suits your own servers best.
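As a rough illustration of what that tuning involves (a sketch only; the repository above carries the full, commented config), the directives typically adjusted look like this:
# illustrative excerpt only, not the complete configuration
worker_processes auto;          # one worker per CPU core
worker_rlimit_nofile 100000;    # raise the open-file limit for busy workers

events {
    worker_connections 4000;    # simultaneous connections per worker
    use epoll;                  # efficient event mechanism on Linux
    multi_accept on;
}

http {
    sendfile on;
    tcp_nopush on;
    tcp_nodelay on;
    keepalive_timeout 30;
    open_file_cache max=200000 inactive=20s;
    gzip on;
}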
Note: This was written using elasticsearch 0.9.
Elasticsearch will automatically create an index (with basic settings and mappings) for you when you POST your first document:
$ curl -X POST 'http://localhost:9200/thegame/weapons/1' -d \
'{
"_id": 1,