Merge pull request zino-hofmann#332 from micimize/eager_results
Eager results, QueryResultSource, MultiSourceResult runMutation future
micimize authored Jun 10, 2019
2 parents ab085a7 + 6b344a3 commit d928c66
Showing 8 changed files with 229 additions and 111 deletions.
39 changes: 28 additions & 11 deletions packages/graphql/lib/src/core/observable_query.dart
@@ -26,11 +26,18 @@ class ObservableQuery {
@required this.queryManager,
@required this.options,
}) : queryId = queryManager.generateQueryId().toString() {
if (options.eagerlyFetchResults) {
_latestWasEagerlyFetched = true;
fetchResults();
}
controller = StreamController<QueryResult>.broadcast(
onListen: onListen,
);
}

// set to true when eagerly fetched to prevent back-to-back queries
bool _latestWasEagerlyFetched = false;

final String queryId;
final QueryManager queryManager;

@@ -39,7 +46,8 @@ class ObservableQuery {
final Set<StreamSubscription<QueryResult>> _onDataSubscriptions =
<StreamSubscription<QueryResult>>{};

QueryResult previousResult;
/// The most recently seen result from this operation's stream
QueryResult latestResult;

QueryLifecycle lifecycle = QueryLifecycle.UNEXECUTED;

@@ -94,13 +102,19 @@
}

void onListen() {
if (_latestWasEagerlyFetched) {
_latestWasEagerlyFetched = false;
return;
}
if (options.fetchResults) {
fetchResults();
}
}

void fetchResults() {
queryManager.fetchQuery(queryId, options);
MultiSourceResult fetchResults() {
final MultiSourceResult allResults =
queryManager.fetchQueryAsMultiSourceResult(queryId, options);
latestResult ??= allResults.eagerResult;

// if onData callbacks have been registered,
// they are waited on by default
@@ -111,30 +125,33 @@
if (options.pollInterval != null && options.pollInterval > 0) {
startPolling(options.pollInterval);
}

return allResults;
}

/// add a result to the stream,
/// copying `loading` and `optimistic`
/// from the `previousResult` if they aren't set.
/// from the `latestResult` if they aren't set.
void addResult(QueryResult result) {
// don't overwrite results due to some async/optimism issue
if (previousResult != null &&
previousResult.timestamp.isAfter(result.timestamp)) {
if (latestResult != null &&
latestResult.timestamp.isAfter(result.timestamp)) {
return;
}

if (previousResult != null) {
result.loading ??= previousResult.loading;
result.optimistic ??= previousResult.optimistic;
if (latestResult != null) {
result.source ??= latestResult.source;
}

if (lifecycle == QueryLifecycle.PENDING && result.optimistic != true) {
lifecycle = QueryLifecycle.COMPLETED;
}

previousResult = result;
latestResult = result;

controller.add(result);
if (!controller.isClosed) {
controller.add(result);
}
}

// most mutation behavior happens here
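A minimal sketch of how the eager-fetch path above might be exercised from client code. This is not part of the commit: the endpoint and query document are placeholders, and `GraphQLClient`, `InMemoryCache`, `HttpLink`, `watchQuery`, and the `stream` getter are assumed from the package's client API of this era.

```dart
import 'package:graphql/client.dart';

void main() {
  final GraphQLClient client = GraphQLClient(
    cache: InMemoryCache(),
    link: HttpLink(uri: 'https://example.com/graphql'), // placeholder endpoint
  );

  // With eagerlyFetchResults, ObservableQuery calls fetchResults() in its
  // constructor, so a cache or optimistic result can exist before anyone listens.
  final ObservableQuery query = client.watchQuery(WatchQueryOptions(
    document: r'query Posts { posts { id title } }', // placeholder document
    fetchPolicy: FetchPolicy.cacheAndNetwork,
    eagerlyFetchResults: true,
  ));

  // onListen then skips the usual fetch because _latestWasEagerlyFetched is set,
  // preventing a back-to-back duplicate query.
  query.stream.listen((QueryResult result) {
    print('source: ${result.source}, data: ${result.data}');
  });
}
```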
172 changes: 105 additions & 67 deletions packages/graphql/lib/src/core/query_manager.dart
@@ -64,34 +64,47 @@ class QueryManager {
String queryId,
BaseOptions options,
) async {
// create a new operation to fetch
final Operation operation = Operation.fromOptions(options);
final MultiSourceResult allResults =
fetchQueryAsMultiSourceResult(queryId, options);
return allResults.networkResult ?? allResults.eagerResult;
}

if (options.optimisticResult != null) {
addOptimisticQueryResult(
queryId,
cacheKey: operation.toKey(),
optimisticResult: options.optimisticResult,
);
}
/// Wraps both the `eagerResult` and the `networkResult` future in a `MultiSourceResult`.
/// If the cache policy precludes a network request, `networkResult` will be `null`.
MultiSourceResult fetchQueryAsMultiSourceResult(
String queryId,
BaseOptions options,
) {
final QueryResult eagerResult = _resolveQueryEagerly(
queryId,
options,
);

// _resolveQueryEagerly handles cacheOnly,
// so if we're loading + cacheFirst we continue to network
return MultiSourceResult(
eagerResult: eagerResult,
networkResult:
(shouldStopAtCache(options.fetchPolicy) && !eagerResult.loading)
? null
: _resolveQueryOnNetwork(queryId, options),
);
}

/// Resolve the query on the network,
/// negotiating any necessary cache edits / optimistic cleanup
Future<QueryResult> _resolveQueryOnNetwork(
String queryId,
BaseOptions options,
) async {
// create a new operation to fetch
final Operation operation = Operation.fromOptions(options)
..setContext(options.context);

FetchResult fetchResult;
QueryResult queryResult;

try {
if (options.context != null) {
operation.setContext(options.context);
}
queryResult = _addEagerCacheResult(
queryId,
operation.toKey(),
options.fetchPolicy,
);

if (shouldStopAtCache(options.fetchPolicy) && queryResult != null) {
return queryResult;
}

// execute the operation through the provided link(s)
fetchResult = await execute(
link: link,
@@ -119,14 +132,11 @@
queryResult = mapFetchResultToQueryResult(
fetchResult,
options,
loading: false,
optimistic: false,
source: QueryResultSource.Network,
);
} catch (error) {
queryResult ??= QueryResult(
loading: false,
optimistic: false,
);
// we set the source to indicate where the failure occurred
queryResult ??= QueryResult(source: QueryResultSource.Network);
queryResult.addError(_attemptToWrapError(error));
}

@@ -143,6 +153,66 @@
return queryResult;
}

/// Add an eager cache response to the stream if possible,
/// based on `fetchPolicy` and `optimisticResults`
QueryResult _resolveQueryEagerly(
String queryId,
BaseOptions options,
) {
final String cacheKey = options.toKey();

QueryResult queryResult = QueryResult(loading: true);

try {
if (options.optimisticResult != null) {
queryResult = _getOptimisticQueryResult(
queryId,
cacheKey: cacheKey,
optimisticResult: options.optimisticResult,
);
}

// if we haven't already resolved results optimistically,
// we attempt to resolve them from the cache
if (shouldRespondEagerlyFromCache(options.fetchPolicy) &&
!queryResult.optimistic) {
final dynamic data = cache.read(cacheKey);
// we only push an eager query with data
if (data != null) {
queryResult = QueryResult(
data: data,
source: QueryResultSource.Cache,
);
}

if (options.fetchPolicy == FetchPolicy.cacheOnly &&
queryResult.loading) {
queryResult = QueryResult(
source: QueryResultSource.Cache,
errors: [
GraphQLError(
message:
'Could not find that operation in the cache. (FetchPolicy.cacheOnly)',
),
],
);
}
}
} catch (error) {
queryResult.addError(_attemptToWrapError(error));
}

// If this isn't a regular eager cache resolution,
// the result will either be loading or optimistic.
//
// If there's an optimistic result, we add it regardless of fetchPolicy.
// This is undefined-ish behavior / an edge case, but still better than
// ignoring a provided optimisticResult outright.
// It would probably be better to add it while ignoring the cache in such cases.
addQueryResult(queryId, queryResult);
return queryResult;
}

void refetchQuery(String queryId) {
final WatchQueryOptions options = queries[queryId].options;
fetchQuery(queryId, options);
@@ -180,38 +250,8 @@
}
}

// TODO what should the relationship to optimism be here
// TODO we should switch to quiver Optionals
/// Add an eager cache response to the stream if possible based on `fetchPolicy`
QueryResult _addEagerCacheResult(
String queryId, String cacheKey, FetchPolicy fetchPolicy) {
if (shouldRespondEagerlyFromCache(fetchPolicy)) {
final dynamic cachedData = cache.read(cacheKey);

if (cachedData != null) {
// we're rebroadcasting from cache,
// so don't override optimism
final QueryResult queryResult = QueryResult(
data: cachedData,
loading: false,
);

addQueryResult(queryId, queryResult);

return queryResult;
}

if (fetchPolicy == FetchPolicy.cacheOnly) {
throw Exception(
'Could not find that operation in the cache. (${fetchPolicy.toString()})',
);
}
}
return null;
}

/// Add an optimistic result to the query specified by `queryId`, if it exists
void addOptimisticQueryResult(
/// Create an optimistic result for the query specified by `queryId`, if it exists
QueryResult _getOptimisticQueryResult(
String queryId, {
@required String cacheKey,
@required Object optimisticResult,
@@ -224,10 +264,9 @@

final QueryResult queryResult = QueryResult(
data: cache.read(cacheKey),
loading: false,
optimistic: true,
source: QueryResultSource.OptimisticResult,
);
addQueryResult(queryId, queryResult);
return queryResult;
}

/// Remove the optimistic patch for `cacheKey`, if any
@@ -250,6 +289,7 @@
mapFetchResultToQueryResult(
FetchResult(data: cachedData),
query.options,
source: QueryResultSource.Cache,
),
);
}
@@ -279,8 +319,7 @@
QueryResult mapFetchResultToQueryResult(
FetchResult fetchResult,
BaseOptions options, {
bool loading,
bool optimistic = false,
@required QueryResultSource source,
}) {
List<GraphQLError> errors;
dynamic data;
Expand Down Expand Up @@ -312,8 +351,7 @@ class QueryManager {
return QueryResult(
data: data,
errors: errors,
loading: loading,
optimistic: optimistic,
source: source,
);
}

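A hedged illustration of how a caller might consume the `MultiSourceResult` split introduced in this file: `eagerResult` is available synchronously, while `networkResult` is a future that is `null` when the fetch policy stops at the cache. The wrapper function and its parameters are assumptions for illustration, as is the availability of these core types via `package:graphql/client.dart`; only the field and method names come from this diff.

```dart
import 'package:graphql/client.dart'; // assumes the core types are exported here

Future<void> runQuery(
  QueryManager queryManager,
  String queryId,
  BaseOptions options,
) async {
  final MultiSourceResult results =
      queryManager.fetchQueryAsMultiSourceResult(queryId, options);

  // The eager result is produced synchronously: cache data, an optimistic
  // result, or a bare `loading` placeholder.
  print('eager source: ${results.eagerResult.source}');

  // The network leg is null when the fetch policy stops at the cache
  // (e.g. cacheFirst with a cache hit, or cacheOnly).
  if (results.networkResult != null) {
    final QueryResult network = await results.networkResult;
    print('network data: ${network.data}');
  }
}
```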
8 changes: 6 additions & 2 deletions packages/graphql/lib/src/core/query_options.dart
@@ -115,7 +115,8 @@ class WatchQueryOptions extends QueryOptions {
ErrorPolicy errorPolicy = ErrorPolicy.none,
Object optimisticResult,
int pollInterval,
this.fetchResults,
this.fetchResults = false,
this.eagerlyFetchResults,
Map<String, dynamic> context,
}) : super(
document: document,
@@ -125,10 +126,13 @@
pollInterval: pollInterval,
context: context,
optimisticResult: optimisticResult,
);
) {
this.eagerlyFetchResults ??= fetchResults;
}

/// Whether or not to fetch results.
bool fetchResults;
bool eagerlyFetchResults;

/// Checks if the [WatchQueryOptions] in this class are equal to some given options.
bool areEqualTo(WatchQueryOptions otherOptions) {
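A short sketch of the new option's defaulting behavior, using the option names exactly as added in this diff (`eagerlyFetchResults ??= fetchResults` in the constructor body); the query documents are placeholders.

```dart
// When eagerlyFetchResults is not set, it falls back to fetchResults,
// so this query is fetched in ObservableQuery's constructor.
final WatchQueryOptions eager = WatchQueryOptions(
  document: r'query Posts { posts { id title } }', // placeholder document
  fetchResults: true,
);

// Setting it to false keeps the pre-existing behavior: nothing is fetched
// until the first listener subscribes and onListen() runs.
final WatchQueryOptions onListenOnly = WatchQueryOptions(
  document: r'query Posts { posts { id title } }', // placeholder document
  fetchResults: true,
  eagerlyFetchResults: false,
);
```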