Safe Haskell | None |
---|---|
Language | Haskell2010 |
Execute a Select query against the BigQuery REST API.
Synopsis
- data RecordSet = RecordSet {
- rows :: Vector (InsOrdHashMap FieldNameText OutputValue)
- wantedFields :: Maybe [Text]
- newtype FieldNameText = FieldNameText Text
- data OutputValue
- = DecimalOutputValue Decimal
- | BigDecimalOutputValue BigDecimal
- | IntegerOutputValue Int64
- | FloatOutputValue Float64
- | GeographyOutputValue Geography
- | TextOutputValue Text
- | TimestampOutputValue Timestamp
- | DateOutputValue Date
- | TimeOutputValue Time
- | DatetimeOutputValue Datetime
- | BytesOutputValue Base64
- | BoolOutputValue Bool
- | ArrayOutputValue (Vector OutputValue)
- | RecordOutputValue (InsOrdHashMap FieldNameText OutputValue)
- | NullOutputValue
- data ExecuteReader = ExecuteReader {}
- data ExecuteProblem
- data ShowDetails
- executeProblemMessage :: ShowDetails -> ExecuteProblem -> Text
- newtype Execute a = Execute {}
- data BigQueryType
- data BigQuery = BigQuery {
- query :: Text
- parameters :: InsOrdHashMap ParameterName Parameter
- data Parameter = Parameter {
- typ :: BigQueryType
- value :: Value
- newtype ParameterName = ParameterName Text
- data BigQueryField = BigQueryField {
- name :: FieldNameText
- typ :: BigQueryFieldType
- mode :: Mode
- data BigQueryFieldType
- data Mode
- data IsNullable
- streamDelaySeconds :: DiffTime
- bigQueryProjectUrl :: Text -> String
- runExecute :: MonadIO m => BigQuerySourceConfig -> Execute RecordSet -> m (Either ExecuteProblem RecordSet)
- executeSelect :: Select -> Execute RecordSet
- getFinalRecordSet :: RecordSet -> Execute RecordSet
- selectToBigQuery :: Select -> BigQuery
- valueType :: Value -> BigQueryType
- valueToBigQueryJson :: Value -> Value
- streamBigQuery :: MonadIO m => BigQueryConnection -> BigQuery -> m (Either ExecuteProblem RecordSet)
- executeBigQuery :: MonadIO m => BigQueryConnection -> BigQuery -> m (Either ExecuteProblem ())
- data JobResults = JobResults {}
- data JobResultsResponse
- data Fetch = Fetch {}
- getJobResults :: MonadIO m => BigQueryConnection -> Job -> Fetch -> m (Either ExecuteProblem JobResultsResponse)
- data Job = Job {}
- jsonRequestHeader :: Request -> Request
- createQueryJob :: (MonadError ExecuteProblem m, MonadIO m) => BigQueryConnection -> BigQuery -> m Job
- data Dataset = Dataset {
- datasetId :: Text
- deleteDataset :: (MonadError ExecuteProblem m, MonadIO m) => BigQueryConnection -> Text -> m ()
- runBigQueryExcept :: (MonadError ExecuteProblem m, MonadIO m) => BigQueryConnection -> Request -> m (Response ByteString)
- insertDataset :: (MonadError ExecuteProblem m, MonadIO m) => BigQueryConnection -> Text -> m Dataset
- parseAsJsonOrText :: ByteString -> Value
- parseRecordSetPayload :: Object -> Parser RecordSet
- parseRow :: Vector BigQueryField -> Value -> Parser (InsOrdHashMap FieldNameText OutputValue)
- parseBigQueryRow :: Vector BigQueryField -> Value -> Parser OutputValue
- parseBigQueryValue :: IsNullable -> BigQueryFieldType -> Value -> Parser OutputValue
- parseTimestamp :: Value -> Parser Timestamp
- parseBigQueryField :: BigQueryField -> Value -> Parser (FieldNameText, OutputValue)
- has_v :: IsNullable -> (Value -> Parser OutputValue) -> Value -> Parser OutputValue
- has_v_generic :: (Value -> Parser a) -> Value -> Parser a
Documentation
A set of records produced by the database. These are joined together. There are all sorts of optimizations possible here, from using a matrix/flat vector, unboxed sums for Value, etc. Presently we choose a naive implementation in the interest of getting other work done.
RecordSet | |
|
newtype FieldNameText Source #
As opposed to BigQuery.FieldName which is a qualified name, this is just the unqualified text name itself.
FieldNameText Text |
Instances
data OutputValue Source #
Instances
data ExecuteProblem Source #
GetJobDecodeProblem String | |
CreateQueryJobDecodeProblem String | |
InsertDatasetDecodeProblem String | |
ExecuteRunBigQueryProblem BigQueryProblem | |
RESTRequestNonOK Status Value |
Instances
data ShowDetails Source #
We use this to hide certain details from the front-end, while allowing them in tests. We have not actually decided whether showing the details is insecure, but until we decide otherwise, it's probably best to err on the side of caution.
executeProblemMessage :: ShowDetails -> ExecuteProblem -> Text Source #
Execute monad; as queries are performed, the record sets are stored in the map.
Instances
Monad Execute Source # | |
Functor Execute Source # | |
Applicative Execute Source # | |
MonadIO Execute Source # | |
Defined in Hasura.Backends.BigQuery.Execute | |
MonadError ExecuteProblem Execute Source # | |
Defined in Hasura.Backends.BigQuery.Execute throwError :: ExecuteProblem -> Execute a # catchError :: Execute a -> (ExecuteProblem -> Execute a) -> Execute a # |
data BigQueryType Source #
Big query parameters must be accompanied by an explicit type signature.
BigQuery | |
|
newtype ParameterName Source #
ParameterName Text |
data BigQueryField Source #
BigQueryField | |
|
data BigQueryFieldType Source #
streamDelaySeconds :: DiffTime Source #
Delay between attempts to get job results if the job is incomplete.
bigQueryProjectUrl :: Text -> String Source #
runExecute :: MonadIO m => BigQuerySourceConfig -> Execute RecordSet -> m (Either ExecuteProblem RecordSet) Source #
getFinalRecordSet :: RecordSet -> Execute RecordSet Source #
This is needed to strip out unneeded fields (join keys) in the final query. This is a relic of the data loader approach. A later improvement would be to update the FromIr code to explicitly reselect the query. But the purpose of this commit is to drop the dataloader code and not modify the from IR code which is more delicate.
selectToBigQuery :: Select -> BigQuery Source #
valueType :: Value -> BigQueryType Source #
Make a BigQuery type for the given value.
valueToBigQueryJson :: Value -> Value Source #
Make a JSON representation of the type of the given value.
streamBigQuery :: MonadIO m => BigQueryConnection -> BigQuery -> m (Either ExecuteProblem RecordSet) Source #
TODO: WARNING: This function hasn't been tested on Big Data(tm),
and therefore I was unable to get BigQuery to produce paginated
results that would contain the pageToken field of JobResults in the
JSON response. Until that test has been done, we should consider
this a preliminary implementation.
executeBigQuery :: MonadIO m => BigQueryConnection -> BigQuery -> m (Either ExecuteProblem ()) Source #
Execute a query without expecting any output (e.g. CREATE TABLE or INSERT)
data JobResults Source #
getJobResults :: MonadIO m => BigQueryConnection -> Job -> Fetch -> m (Either ExecuteProblem JobResultsResponse) Source #
Get results of a job.
jsonRequestHeader :: Request -> Request Source #
Make a Request return JSON
createQueryJob :: (MonadError ExecuteProblem m, MonadIO m) => BigQueryConnection -> BigQuery -> m Job Source #
Create a job asynchronously.
deleteDataset :: (MonadError ExecuteProblem m, MonadIO m) => BigQueryConnection -> Text -> m () Source #
Delete a dataset
runBigQueryExcept :: (MonadError ExecuteProblem m, MonadIO m) => BigQueryConnection -> Request -> m (Response ByteString) Source #
Run request and map errors into ExecuteProblem
insertDataset :: (MonadError ExecuteProblem m, MonadIO m) => BigQueryConnection -> Text -> m Dataset Source #
Insert a new dataset
parseAsJsonOrText :: ByteString -> Value Source #
Parse the given ByteString as a JSON value. If it is not valid JSON, encode it as plain text instead.
parseRecordSetPayload :: Object -> Parser RecordSet Source #
parseRow :: Vector BigQueryField -> Value -> Parser (InsOrdHashMap FieldNameText OutputValue) Source #
parseBigQueryRow :: Vector BigQueryField -> Value -> Parser OutputValue Source #
Parse a row, which at the top level of the "rows" output has no
{"v":..} wrapper, but does have the wrapper when it appears nested.
See parseBigQueryValue.
parseBigQueryValue :: IsNullable -> BigQueryFieldType -> Value -> Parser OutputValue Source #
parseTimestamp :: Value -> Parser Timestamp Source #
Parse upstream timestamp value in epoch milliseconds and convert it to calendar date time format https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#timestamp_type
parseBigQueryField :: BigQueryField -> Value -> Parser (FieldNameText, OutputValue) Source #
has_v :: IsNullable -> (Value -> Parser OutputValue) -> Value -> Parser OutputValue Source #
has_v_generic :: (Value -> Parser a) -> Value -> Parser a Source #