Example 1

with data as (
  select
    suppress_and_count_repeats (
      'pipes',
      'pipes_organization_member',
      'org_handle',
      'user_handle, user_id, status',
      array['user_id', 'status']  -- assumed value for the truncated final argument (additional_column_names)
    ) as result
)
select * from data;  -- assumed minimal completion so the call parses and runs
CREATE OR REPLACE FUNCTION suppress_and_count_repeats(
  schema_name TEXT,               -- Schema name of the target table
  table_name TEXT,                -- Name of the target table
  partition_by_column TEXT,       -- Column to partition by
  order_by_columns TEXT,          -- Columns to order by
  additional_column_names TEXT[]  -- Array of additional column names
) RETURNS SETOF JSON AS $$
DECLARE
  generated_query TEXT;    -- Variable to store the generated SQL query
  additional_selects TEXT; -- Variable to store the dynamically constructed additional select statements
WITH cte AS (
  SELECT
    org_handle,
    user_handle,
    status,
    ROW_NUMBER() OVER (PARTITION BY org_handle ORDER BY user_handle) AS rn,
    DENSE_RANK() OVER (ORDER BY org_handle) AS org_rank
  FROM
    pipes_organization_member
)
SELECT * FROM cte;  -- assumed minimal final select so the CTE runs standalone
import sys
import json
import base64
import os
from openai import OpenAI

# OpenAI API key
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")

client = OpenAI(api_key=OPENAI_API_KEY)
import openai
import json
import os
from openai import OpenAI

client = OpenAI(api_key=os.environ.get("OPENAI_API_KEY"))

# Mock function to simulate fetching weather data from an API
def get_weather(location):
    # In a real scenario, replace this with an actual API call
    # Assumed completion: return canned data so the example runs end to end
    return json.dumps({"location": location, "temperature": "72", "unit": "fahrenheit"})
func listDevices(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) {
	var visitor ListPredicate = func(client *kolide.Client, cursor string, limit int32, searches ...kolide.Search) (interface{}, error) {
		return client.GetDevices(cursor, limit, searches...)
	}
	return listAnything(ctx, d, h, "kolide_device.listDevices", visitor, "Devices")
}

func listAdminUsers(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData) (interface{}, error) {
	// Assumed completion: mirrors listDevices; the exact client method name is an assumption
	var visitor ListPredicate = func(client *kolide.Client, cursor string, limit int32, searches ...kolide.Search) (interface{}, error) {
		return client.GetAdminUsers(cursor, limit, searches...)
	}
	return listAnything(ctx, d, h, "kolide_admin_user.listAdminUsers", visitor, "AdminUsers")
}
func listAnything(ctx context.Context, d *plugin.QueryData, h *plugin.HydrateData, callee string, visitor ListPredicate, target string) (interface{}, error) {
	// Create a slice to hold search queries
	searches, err := query(ctx, d)
	if err != nil {
		plugin.Logger(ctx).Error(callee, "qualifier_operator_error", err)
		return nil, err
	}
	// Establish connection to Kolide client
	client, err := connect(ctx, d)
You are a SQL tutor who helps people write Steampipe queries that involve JSON columns. Such queries can be hard to understand, so we want to provide queries in two forms: concise and expanded.
For example, we want a query to count my gists by language.
Here is the schema for the github_my_gist table:

ctx         jsonb                     Steampipe context in JSON form, e.g. connection_name.
comments    bigint                    The number of comments for the gist.
created_at  timestamp with time zone  The timestamp when the gist was created.
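Here is a sketch of the two forms such a query might take, assuming github_my_gist exposes a files jsonb column whose elements include a language key (that column and key are assumptions, not shown in the schema excerpt above).

Concise, expanding the JSON array directly in the FROM clause:

select
  f ->> 'language' as language,
  count(*) as n
from
  github_my_gist g,
  jsonb_array_elements(g.files) as f
group by
  language
order by
  n desc;

Expanded, with the JSON expansion pulled into a CTE so each step reads on its own:

with expanded_files as (
  select
    jsonb_array_elements(files) as f
  from
    github_my_gist
)
select
  f ->> 'language' as language,
  count(*) as n
from
  expanded_files
group by
  language
order by
  n desc;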
You are a writing coach who helps companies improve their business communication.
I'll show you an example of a successful intervention you've made to improve this poorly-written press release.
Remcom Broadens Capabilities Of Schematic Editor To Support Diplex Matched Antennas In XFdtd EM Simulation Software
Remcom announces expanded capabilities within its schematic editor in the latest release of XFdtd 3D EM Simulation Software, including support for diplex matched antennas and new efficiencies aimed at streamlining antenna design workflows and shortening design cycles.
XFdtd's schematic editor is a novel electromagnetic simulation tool that combines matching network analysis with full-wave results, making it ideal for complex antenna design applications involving multi-state and multi-port aperture or impedance tuners and corporate feed networks with digital phase shifters.
Today I learned that * * * * * and select random() are your friends when starting a Flowpipe mod that uses a query trigger.

* * * * * lets you iterate as fast as possible: the trigger runs every minute.

select random() ensures there is always fresh data; otherwise nothing will happen.

If you're trying to debug something downstream, like sending email, this combination ensures you'll actually test the downstream thing every time.
Here's the foundation for a mod that will check for new access keys and alert when a new one is found. I used the test pipeline as an initial check, then switched to the email pipeline in order to debug separate issues with that (it's always about auth, right?) once I knew the pipeline would receive a message every minute.
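Once the plumbing works, select random() gives way to the real query. Here is a sketch of what the access-key check might look like, assuming the AWS plugin's aws_iam_access_key table (the one-day window is illustrative):

select
  access_key_id,
  user_name,
  create_date
from
  aws_iam_access_key
where
  create_date > now() - interval '1 day';

With a query like this in the trigger, it fires when a new key appears rather than on every one-minute tick.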