Tue, 06 Jan 2015 21:39:09 +0100
Conditionally force memory storage according to privacy.thirdparty.isolate.
This solves Tor bug #9701, complying with the disk-avoidance requirements documented at
https://www.torproject.org/projects/torbrowser/design/#disk-avoidance.
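A minimal sketch of the gating this message describes, assuming the pref is read through Gecko's Preferences API and that privacy.thirdparty.isolate is an integer isolation level; the helper name and the treatment of non-zero values are illustrative assumptions, not part of this patch:

    #include "mozilla/Preferences.h"

    // Hypothetical helper: code that would otherwise create disk-backed
    // storage consults this and falls back to memory-only storage instead.
    static bool
    ShouldForceMemoryStorage()
    {
      // Assumption: any non-zero isolation level implies disk avoidance.
      return mozilla::Preferences::GetInt("privacy.thirdparty.isolate", 0) != 0;
    }
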
/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "DrawTargetCairo.h"

#include "SourceSurfaceCairo.h"
#include "PathCairo.h"
#include "HelpersCairo.h"
#include "ScaledFontBase.h"
#include "BorrowedContext.h"
#include "FilterNodeSoftware.h"

#include "cairo.h"
#include "cairo-tee.h"
#include <string.h>

#include "Blur.h"
#include "Logging.h"
#include "Tools.h"

#ifdef CAIRO_HAS_QUARTZ_SURFACE
#include "cairo-quartz.h"
#include <ApplicationServices/ApplicationServices.h>
#endif

#ifdef CAIRO_HAS_XLIB_SURFACE
#include "cairo-xlib.h"
#endif

#ifdef CAIRO_HAS_WIN32_SURFACE
#include "cairo-win32.h"
#endif

#include <algorithm>

namespace mozilla {
namespace gfx {

cairo_surface_t *DrawTargetCairo::mDummySurface;

namespace {

// An RAII class to prepare to draw a context and optional path. Saves and
// restores the context on construction/destruction.
class AutoPrepareForDrawing
{
public:
  AutoPrepareForDrawing(DrawTargetCairo* dt, cairo_t* ctx)
    : mCtx(ctx)
  {
    dt->PrepareForDrawing(ctx);
    cairo_save(mCtx);
    MOZ_ASSERT(cairo_status(mCtx) || dt->GetTransform() == GetTransform());
  }

  AutoPrepareForDrawing(DrawTargetCairo* dt, cairo_t* ctx, const Path* path)
    : mCtx(ctx)
  {
    dt->PrepareForDrawing(ctx, path);
    cairo_save(mCtx);
    MOZ_ASSERT(cairo_status(mCtx) || dt->GetTransform() == GetTransform());
  }

  ~AutoPrepareForDrawing() { cairo_restore(mCtx); }

private:
#ifdef DEBUG
  Matrix GetTransform()
  {
    cairo_matrix_t mat;
    cairo_get_matrix(mCtx, &mat);
    return Matrix(mat.xx, mat.yx, mat.xy, mat.yy, mat.x0, mat.y0);
  }
#endif

  cairo_t* mCtx;
};

} // end anonymous namespace

static bool
SupportsSelfCopy(cairo_surface_t* surface)
{
  switch (cairo_surface_get_type(surface))
  {
#ifdef CAIRO_HAS_QUARTZ_SURFACE
    case CAIRO_SURFACE_TYPE_QUARTZ:
      return true;
#endif
#ifdef CAIRO_HAS_WIN32_SURFACE
    case CAIRO_SURFACE_TYPE_WIN32:
    case CAIRO_SURFACE_TYPE_WIN32_PRINTING:
      return true;
#endif
    default:
      return false;
  }
}

static bool
PatternIsCompatible(const Pattern& aPattern)
{
  switch (aPattern.GetType())
  {
    case PatternType::LINEAR_GRADIENT:
    {
      const LinearGradientPattern& pattern = static_cast<const LinearGradientPattern&>(aPattern);
      return pattern.mStops->GetBackendType() == BackendType::CAIRO;
    }
    case PatternType::RADIAL_GRADIENT:
    {
      const RadialGradientPattern& pattern = static_cast<const RadialGradientPattern&>(aPattern);
      return pattern.mStops->GetBackendType() == BackendType::CAIRO;
    }
    default:
      return true;
  }
}

static cairo_user_data_key_t surfaceDataKey;

void
ReleaseData(void* aData)
{
  static_cast<DataSourceSurface*>(aData)->Release();
}

/**
 * Returns cairo surface for the given SourceSurface.
 * If possible, it will use the cairo_surface associated with aSurface,
 * otherwise, it will create a new cairo_surface.
 * In either case, the caller must call cairo_surface_destroy on the
 * result when it is done with it.
 */
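// Illustrative usage only (the locals below are hypothetical):
//
//   cairo_surface_t* surf = GetCairoSurfaceForSourceSurface(aSurface);
//   if (surf) {
//     cairo_set_source_surface(ctx, surf, 0, 0); // the context keeps its own reference
//     cairo_surface_destroy(surf);               // drop the reference we were handed
//   }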
cairo_surface_t*
GetCairoSurfaceForSourceSurface(SourceSurface *aSurface, bool aExistingOnly = false)
{
  if (aSurface->GetType() == SurfaceType::CAIRO) {
    cairo_surface_t* surf = static_cast<SourceSurfaceCairo*>(aSurface)->GetSurface();
    cairo_surface_reference(surf);
    return surf;
  }

  if (aSurface->GetType() == SurfaceType::CAIRO_IMAGE) {
    cairo_surface_t* surf =
      static_cast<const DataSourceSurfaceCairo*>(aSurface)->GetSurface();
    cairo_surface_reference(surf);
    return surf;
  }

  if (aExistingOnly) {
    return nullptr;
  }

  RefPtr<DataSourceSurface> data = aSurface->GetDataSurface();
  if (!data) {
    return nullptr;
  }

  cairo_surface_t* surf =
    cairo_image_surface_create_for_data(data->GetData(),
                                        GfxFormatToCairoFormat(data->GetFormat()),
                                        data->GetSize().width,
                                        data->GetSize().height,
                                        data->Stride());

  // In certain scenarios, requesting larger than 8k image fails. Bug 803568
  // covers the details of how to run into it, but the full detailed
  // investigation hasn't been done to determine the underlying cause. We
  // will just handle the failure to allocate the surface to avoid a crash.
  if (cairo_surface_status(surf)) {
    return nullptr;
  }

  cairo_surface_set_user_data(surf,
                              &surfaceDataKey,
                              data.forget().drop(),
                              ReleaseData);
  return surf;
}

// An RAII class to temporarily clear any device offset set
// on a surface. Note that this does not take a reference to the
// surface.
class AutoClearDeviceOffset
{
public:
  AutoClearDeviceOffset(SourceSurface* aSurface)
    : mSurface(nullptr)
  {
    Init(aSurface);
  }

  AutoClearDeviceOffset(const Pattern& aPattern)
    : mSurface(nullptr)
  {
    if (aPattern.GetType() == PatternType::SURFACE) {
      const SurfacePattern& pattern = static_cast<const SurfacePattern&>(aPattern);
      Init(pattern.mSurface);
    }
  }

  ~AutoClearDeviceOffset()
  {
    if (mSurface) {
      cairo_surface_set_device_offset(mSurface, mX, mY);
    }
  }

private:
  void Init(SourceSurface* aSurface)
  {
    cairo_surface_t* surface = GetCairoSurfaceForSourceSurface(aSurface, true);
    if (surface) {
      Init(surface);
      cairo_surface_destroy(surface);
    }
  }

  void Init(cairo_surface_t *aSurface)
  {
    mSurface = aSurface;
    cairo_surface_get_device_offset(mSurface, &mX, &mY);
    cairo_surface_set_device_offset(mSurface, 0, 0);
  }

  cairo_surface_t* mSurface;
  double mX;
  double mY;
};

// Returns a cairo_pattern_t for the given Pattern, or nullptr if a surface
// pattern's surface cannot be converted (callers must check for this).
// You must always pass in Cairo-compatible patterns, most notably gradients
// whose stops were created by the Cairo backend (GradientStopsCairo).
// The pattern returned must have cairo_pattern_destroy() called on it by the
// caller.
// As the cairo_pattern_t returned may depend on the Pattern passed in, the
// lifetime of the cairo_pattern_t returned must not exceed the lifetime of the
// Pattern passed in.
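// Illustrative call pattern (hedged; it mirrors the callers further below):
//
//   cairo_pattern_t* pat = GfxPatternToCairoPattern(aPattern, aOptions.mAlpha);
//   if (pat) {                      // may be null for surface patterns
//     cairo_set_source(mContext, pat);
//     cairo_pattern_destroy(pat);   // destroy before aPattern goes away
//   }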
static cairo_pattern_t*
GfxPatternToCairoPattern(const Pattern& aPattern, Float aAlpha)
{
  cairo_pattern_t* pat;
  const Matrix* matrix = nullptr;

  switch (aPattern.GetType())
  {
    case PatternType::COLOR:
    {
      Color color = static_cast<const ColorPattern&>(aPattern).mColor;
      pat = cairo_pattern_create_rgba(color.r, color.g, color.b, color.a * aAlpha);
      break;
    }

    case PatternType::SURFACE:
    {
      const SurfacePattern& pattern = static_cast<const SurfacePattern&>(aPattern);
      cairo_surface_t* surf = GetCairoSurfaceForSourceSurface(pattern.mSurface);
      if (!surf)
        return nullptr;

      pat = cairo_pattern_create_for_surface(surf);

      matrix = &pattern.mMatrix;

      cairo_pattern_set_filter(pat, GfxFilterToCairoFilter(pattern.mFilter));
      cairo_pattern_set_extend(pat, GfxExtendToCairoExtend(pattern.mExtendMode));

      cairo_surface_destroy(surf);
      break;
    }
    case PatternType::LINEAR_GRADIENT:
    {
      const LinearGradientPattern& pattern = static_cast<const LinearGradientPattern&>(aPattern);

      pat = cairo_pattern_create_linear(pattern.mBegin.x, pattern.mBegin.y,
                                        pattern.mEnd.x, pattern.mEnd.y);

      MOZ_ASSERT(pattern.mStops->GetBackendType() == BackendType::CAIRO);
      GradientStopsCairo* cairoStops = static_cast<GradientStopsCairo*>(pattern.mStops.get());
      cairo_pattern_set_extend(pat, GfxExtendToCairoExtend(cairoStops->GetExtendMode()));

      matrix = &pattern.mMatrix;

      const std::vector<GradientStop>& stops = cairoStops->GetStops();
      for (size_t i = 0; i < stops.size(); ++i) {
        const GradientStop& stop = stops[i];
        cairo_pattern_add_color_stop_rgba(pat, stop.offset, stop.color.r,
                                          stop.color.g, stop.color.b,
                                          stop.color.a);
      }

      break;
    }
    case PatternType::RADIAL_GRADIENT:
    {
      const RadialGradientPattern& pattern = static_cast<const RadialGradientPattern&>(aPattern);

      pat = cairo_pattern_create_radial(pattern.mCenter1.x, pattern.mCenter1.y, pattern.mRadius1,
                                        pattern.mCenter2.x, pattern.mCenter2.y, pattern.mRadius2);

      MOZ_ASSERT(pattern.mStops->GetBackendType() == BackendType::CAIRO);
      GradientStopsCairo* cairoStops = static_cast<GradientStopsCairo*>(pattern.mStops.get());
      cairo_pattern_set_extend(pat, GfxExtendToCairoExtend(cairoStops->GetExtendMode()));

      matrix = &pattern.mMatrix;

      const std::vector<GradientStop>& stops = cairoStops->GetStops();
      for (size_t i = 0; i < stops.size(); ++i) {
        const GradientStop& stop = stops[i];
        cairo_pattern_add_color_stop_rgba(pat, stop.offset, stop.color.r,
                                          stop.color.g, stop.color.b,
                                          stop.color.a);
      }

      break;
    }
    default:
    {
      // We should support all pattern types!
      MOZ_ASSERT(false);
    }
  }

  // The pattern matrix is a matrix that transforms the pattern into user
  // space. Cairo takes a matrix that converts from user space to pattern
  // space. Cairo therefore needs the inverse.
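  // For example, a pattern matrix that scales by 2 maps pattern space into
  // user space; the matrix handed to cairo must instead scale by 0.5 (user
  // space back into pattern space), which is what the inversion below yields.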
  if (matrix) {
    cairo_matrix_t mat;
    GfxMatrixToCairoMatrix(*matrix, mat);
    cairo_matrix_invert(&mat);
    cairo_pattern_set_matrix(pat, &mat);
  }

  return pat;
}

static bool
NeedIntermediateSurface(const Pattern& aPattern, const DrawOptions& aOptions)
{
  // We pre-multiply colours' alpha by the global alpha, so we don't need to
  // use an intermediate surface for them.
  if (aPattern.GetType() == PatternType::COLOR)
    return false;

  if (aOptions.mAlpha == 1.0)
    return false;

  return true;
}

DrawTargetCairo::DrawTargetCairo()
  : mContext(nullptr)
  , mSurface(nullptr)
  , mLockedBits(nullptr)
{
}

DrawTargetCairo::~DrawTargetCairo()
{
  cairo_destroy(mContext);
  if (mSurface) {
    cairo_surface_destroy(mSurface);
  }
  MOZ_ASSERT(!mLockedBits);
}

IntSize
DrawTargetCairo::GetSize()
{
  return mSize;
}

TemporaryRef<SourceSurface>
DrawTargetCairo::Snapshot()
{
  if (mSnapshot) {
    return mSnapshot;
  }

  IntSize size = GetSize();

  cairo_content_t content = cairo_surface_get_content(mSurface);
  mSnapshot = new SourceSurfaceCairo(mSurface,
                                     size,
                                     CairoContentToGfxFormat(content),
                                     this);
  return mSnapshot;
}

bool
DrawTargetCairo::LockBits(uint8_t** aData, IntSize* aSize,
                          int32_t* aStride, SurfaceFormat* aFormat)
{
  if (cairo_surface_get_type(mSurface) == CAIRO_SURFACE_TYPE_IMAGE) {
    WillChange();

    mLockedBits = cairo_image_surface_get_data(mSurface);
    *aData = mLockedBits;
    *aSize = GetSize();
    *aStride = cairo_image_surface_get_stride(mSurface);
    *aFormat = GetFormat();
    return true;
  }

  return false;
}

void
DrawTargetCairo::ReleaseBits(uint8_t* aData)
{
  MOZ_ASSERT(mLockedBits == aData);
  mLockedBits = nullptr;
}

void
DrawTargetCairo::Flush()
{
  cairo_surface_t* surf = cairo_get_target(mContext);
  cairo_surface_flush(surf);
}

void
DrawTargetCairo::PrepareForDrawing(cairo_t* aContext, const Path* aPath /* = nullptr */)
{
  WillChange(aPath);
}

cairo_surface_t*
DrawTargetCairo::GetDummySurface()
{
  if (mDummySurface) {
    return mDummySurface;
  }

  mDummySurface = cairo_image_surface_create(CAIRO_FORMAT_ARGB32, 1, 1);

  return mDummySurface;
}

void
DrawTargetCairo::DrawSurface(SourceSurface *aSurface,
                             const Rect &aDest,
                             const Rect &aSource,
                             const DrawSurfaceOptions &aSurfOptions,
                             const DrawOptions &aOptions)
{
  AutoPrepareForDrawing prep(this, mContext);
  AutoClearDeviceOffset clear(aSurface);

  float sx = aSource.Width() / aDest.Width();
  float sy = aSource.Height() / aDest.Height();

  cairo_matrix_t src_mat;
  cairo_matrix_init_translate(&src_mat, aSource.X(), aSource.Y());
  cairo_matrix_scale(&src_mat, sx, sy);

  cairo_surface_t* surf = GetCairoSurfaceForSourceSurface(aSurface);
  cairo_pattern_t* pat = cairo_pattern_create_for_surface(surf);
  cairo_surface_destroy(surf);

  cairo_pattern_set_matrix(pat, &src_mat);
  cairo_pattern_set_filter(pat, GfxFilterToCairoFilter(aSurfOptions.mFilter));
  cairo_pattern_set_extend(pat, CAIRO_EXTEND_PAD);

  cairo_set_antialias(mContext, GfxAntialiasToCairoAntialias(aOptions.mAntialiasMode));

  // If the destination rect covers the entire clipped area, then unbounded and bounded
  // operations are identical, and we don't need to push a group.
  bool needsGroup = !IsOperatorBoundByMask(aOptions.mCompositionOp) &&
                    !aDest.Contains(GetUserSpaceClip());

  cairo_translate(mContext, aDest.X(), aDest.Y());

  if (needsGroup) {
    cairo_push_group(mContext);
    cairo_new_path(mContext);
    cairo_rectangle(mContext, 0, 0, aDest.Width(), aDest.Height());
    cairo_set_source(mContext, pat);
    cairo_fill(mContext);
    cairo_pop_group_to_source(mContext);
  } else {
    cairo_new_path(mContext);
    cairo_rectangle(mContext, 0, 0, aDest.Width(), aDest.Height());
    cairo_clip(mContext);
    cairo_set_source(mContext, pat);
  }

  cairo_set_operator(mContext, GfxOpToCairoOp(aOptions.mCompositionOp));

  cairo_paint_with_alpha(mContext, aOptions.mAlpha);

  cairo_pattern_destroy(pat);
}

void
DrawTargetCairo::DrawFilter(FilterNode *aNode,
                            const Rect &aSourceRect,
                            const Point &aDestPoint,
                            const DrawOptions &aOptions)
{
  FilterNodeSoftware* filter = static_cast<FilterNodeSoftware*>(aNode);
  filter->Draw(this, aSourceRect, aDestPoint, aOptions);
}

void
DrawTargetCairo::DrawSurfaceWithShadow(SourceSurface *aSurface,
                                       const Point &aDest,
                                       const Color &aColor,
                                       const Point &aOffset,
                                       Float aSigma,
                                       CompositionOp aOperator)
{
  if (aSurface->GetType() != SurfaceType::CAIRO) {
    return;
  }

  AutoClearDeviceOffset clear(aSurface);

  Float width = Float(aSurface->GetSize().width);
  Float height = Float(aSurface->GetSize().height);

  SourceSurfaceCairo* source = static_cast<SourceSurfaceCairo*>(aSurface);
  cairo_surface_t* sourcesurf = source->GetSurface();
  cairo_surface_t* blursurf;
  cairo_surface_t* surf;

  // We only use the A8 surface for blurred shadows. Unblurred shadows can just
  // use the RGBA surface directly.
  if (cairo_surface_get_type(sourcesurf) == CAIRO_SURFACE_TYPE_TEE) {
    blursurf = cairo_tee_surface_index(sourcesurf, 0);
    surf = cairo_tee_surface_index(sourcesurf, 1);

    MOZ_ASSERT(cairo_surface_get_type(blursurf) == CAIRO_SURFACE_TYPE_IMAGE);
    Rect extents(0, 0, width, height);
    AlphaBoxBlur blur(extents,
                      cairo_image_surface_get_stride(blursurf),
                      aSigma, aSigma);
    blur.Blur(cairo_image_surface_get_data(blursurf));
  } else {
    blursurf = sourcesurf;
    surf = sourcesurf;
  }

  WillChange();
  ClearSurfaceForUnboundedSource(aOperator);

  cairo_save(mContext);
  cairo_set_operator(mContext, GfxOpToCairoOp(aOperator));
  cairo_identity_matrix(mContext);
  cairo_translate(mContext, aDest.x, aDest.y);

  if (IsOperatorBoundByMask(aOperator)) {
    cairo_set_source_rgba(mContext, aColor.r, aColor.g, aColor.b, aColor.a);
    cairo_mask_surface(mContext, blursurf, aOffset.x, aOffset.y);

    // Now that the shadow has been drawn, we can draw the surface on top.
    cairo_set_source_surface(mContext, surf, 0, 0);
    cairo_new_path(mContext);
    cairo_rectangle(mContext, 0, 0, width, height);
    cairo_fill(mContext);
  } else {
    cairo_push_group(mContext);
    cairo_set_source_rgba(mContext, aColor.r, aColor.g, aColor.b, aColor.a);
    cairo_mask_surface(mContext, blursurf, aOffset.x, aOffset.y);

    // Now that the shadow has been drawn, we can draw the surface on top.
    cairo_set_source_surface(mContext, surf, 0, 0);
    cairo_new_path(mContext);
    cairo_rectangle(mContext, 0, 0, width, height);
    cairo_fill(mContext);
    cairo_pop_group_to_source(mContext);
    cairo_paint(mContext);
  }

  cairo_restore(mContext);
}

void
DrawTargetCairo::DrawPattern(const Pattern& aPattern,
                             const StrokeOptions& aStrokeOptions,
                             const DrawOptions& aOptions,
                             DrawPatternType aDrawType,
                             bool aPathBoundsClip)
{
  if (!PatternIsCompatible(aPattern)) {
    return;
  }

  AutoClearDeviceOffset clear(aPattern);

  cairo_pattern_t* pat = GfxPatternToCairoPattern(aPattern, aOptions.mAlpha);
  if (!pat) {
    return;
  }
  if (cairo_pattern_status(pat)) {
    cairo_pattern_destroy(pat);
    gfxWarning() << "Invalid pattern";
    return;
  }

  cairo_set_source(mContext, pat);

  cairo_set_antialias(mContext, GfxAntialiasToCairoAntialias(aOptions.mAntialiasMode));

  if (NeedIntermediateSurface(aPattern, aOptions) ||
      (!IsOperatorBoundByMask(aOptions.mCompositionOp) && !aPathBoundsClip)) {
    cairo_push_group_with_content(mContext, CAIRO_CONTENT_COLOR_ALPHA);

    // Don't want operators to be applied twice
    cairo_set_operator(mContext, CAIRO_OPERATOR_OVER);

    if (aDrawType == DRAW_STROKE) {
      SetCairoStrokeOptions(mContext, aStrokeOptions);
      cairo_stroke_preserve(mContext);
    } else {
      cairo_fill_preserve(mContext);
    }

    cairo_pop_group_to_source(mContext);

    // Now draw the content using the desired operator
    cairo_set_operator(mContext, GfxOpToCairoOp(aOptions.mCompositionOp));
    cairo_paint_with_alpha(mContext, aOptions.mAlpha);
  } else {
    cairo_set_operator(mContext, GfxOpToCairoOp(aOptions.mCompositionOp));

    if (aDrawType == DRAW_STROKE) {
      SetCairoStrokeOptions(mContext, aStrokeOptions);
      cairo_stroke_preserve(mContext);
    } else {
      cairo_fill_preserve(mContext);
    }
  }

  cairo_pattern_destroy(pat);
}

void
DrawTargetCairo::FillRect(const Rect &aRect,
                          const Pattern &aPattern,
                          const DrawOptions &aOptions)
{
  AutoPrepareForDrawing prep(this, mContext);

  cairo_new_path(mContext);
  cairo_rectangle(mContext, aRect.x, aRect.y, aRect.Width(), aRect.Height());

  bool pathBoundsClip = false;

  if (aRect.Contains(GetUserSpaceClip())) {
    pathBoundsClip = true;
  }

  DrawPattern(aPattern, StrokeOptions(), aOptions, DRAW_FILL, pathBoundsClip);
}

void
DrawTargetCairo::CopySurfaceInternal(cairo_surface_t* aSurface,
                                     const IntRect &aSource,
                                     const IntPoint &aDest)
{
  if (cairo_surface_status(aSurface)) {
    gfxWarning() << "Invalid surface";
    return;
  }

  cairo_identity_matrix(mContext);

  cairo_set_source_surface(mContext, aSurface, aDest.x - aSource.x, aDest.y - aSource.y);
  cairo_set_operator(mContext, CAIRO_OPERATOR_SOURCE);
  cairo_set_antialias(mContext, CAIRO_ANTIALIAS_NONE);

  cairo_reset_clip(mContext);
  cairo_new_path(mContext);
  cairo_rectangle(mContext, aDest.x, aDest.y, aSource.width, aSource.height);
  cairo_fill(mContext);
}

void
DrawTargetCairo::CopySurface(SourceSurface *aSurface,
                             const IntRect &aSource,
                             const IntPoint &aDest)
{
  AutoPrepareForDrawing prep(this, mContext);
  AutoClearDeviceOffset clear(aSurface);

  if (!aSurface) {
    gfxWarning() << "Unsupported surface type specified";
    return;
  }

  cairo_surface_t* surf = GetCairoSurfaceForSourceSurface(aSurface);
  if (!surf) {
    gfxWarning() << "Unsupported surface type specified";
    return;
  }

  CopySurfaceInternal(surf, aSource, aDest);
  cairo_surface_destroy(surf);
}

void
DrawTargetCairo::CopyRect(const IntRect &aSource,
                          const IntPoint &aDest)
{
  AutoPrepareForDrawing prep(this, mContext);

  IntRect source = aSource;
  cairo_surface_t* surf = mSurface;

  if (!SupportsSelfCopy(mSurface) &&
      aDest.y >= aSource.y &&
      aDest.y < aSource.YMost()) {
    cairo_surface_t* similar = cairo_surface_create_similar(mSurface,
                                                            GfxFormatToCairoContent(GetFormat()),
                                                            aSource.width, aSource.height);
    cairo_t* ctx = cairo_create(similar);
    cairo_set_operator(ctx, CAIRO_OPERATOR_SOURCE);
    cairo_set_source_surface(ctx, surf, -aSource.x, -aSource.y);
    cairo_paint(ctx);
    cairo_destroy(ctx);

    source.x = 0;
    source.y = 0;
    surf = similar;
  }

  CopySurfaceInternal(surf, source, aDest);

  if (surf != mSurface) {
    cairo_surface_destroy(surf);
  }
}

void
DrawTargetCairo::ClearRect(const Rect& aRect)
{
  AutoPrepareForDrawing prep(this, mContext);

  cairo_set_antialias(mContext, CAIRO_ANTIALIAS_NONE);
  cairo_new_path(mContext);
  cairo_set_operator(mContext, CAIRO_OPERATOR_CLEAR);
  cairo_rectangle(mContext, aRect.X(), aRect.Y(),
                  aRect.Width(), aRect.Height());
  cairo_fill(mContext);
}

void
DrawTargetCairo::StrokeRect(const Rect &aRect,
                            const Pattern &aPattern,
                            const StrokeOptions &aStrokeOptions /* = StrokeOptions() */,
                            const DrawOptions &aOptions /* = DrawOptions() */)
{
  AutoPrepareForDrawing prep(this, mContext);

  cairo_new_path(mContext);
  cairo_rectangle(mContext, aRect.x, aRect.y, aRect.Width(), aRect.Height());

  DrawPattern(aPattern, aStrokeOptions, aOptions, DRAW_STROKE);
}

void
DrawTargetCairo::StrokeLine(const Point &aStart,
                            const Point &aEnd,
                            const Pattern &aPattern,
                            const StrokeOptions &aStrokeOptions /* = StrokeOptions() */,
                            const DrawOptions &aOptions /* = DrawOptions() */)
{
  AutoPrepareForDrawing prep(this, mContext);

  cairo_new_path(mContext);
  cairo_move_to(mContext, aStart.x, aStart.y);
  cairo_line_to(mContext, aEnd.x, aEnd.y);

  DrawPattern(aPattern, aStrokeOptions, aOptions, DRAW_STROKE);
}

void
DrawTargetCairo::Stroke(const Path *aPath,
                        const Pattern &aPattern,
                        const StrokeOptions &aStrokeOptions /* = StrokeOptions() */,
                        const DrawOptions &aOptions /* = DrawOptions() */)
{
  AutoPrepareForDrawing prep(this, mContext, aPath);

  if (aPath->GetBackendType() != BackendType::CAIRO)
    return;

  PathCairo* path = const_cast<PathCairo*>(static_cast<const PathCairo*>(aPath));
  path->SetPathOnContext(mContext);

  DrawPattern(aPattern, aStrokeOptions, aOptions, DRAW_STROKE);
}

void
DrawTargetCairo::Fill(const Path *aPath,
                      const Pattern &aPattern,
                      const DrawOptions &aOptions /* = DrawOptions() */)
{
  AutoPrepareForDrawing prep(this, mContext, aPath);

  if (aPath->GetBackendType() != BackendType::CAIRO)
    return;

  PathCairo* path = const_cast<PathCairo*>(static_cast<const PathCairo*>(aPath));
  path->SetPathOnContext(mContext);

  DrawPattern(aPattern, StrokeOptions(), aOptions, DRAW_FILL);
}

void
DrawTargetCairo::SetPermitSubpixelAA(bool aPermitSubpixelAA)
{
  DrawTarget::SetPermitSubpixelAA(aPermitSubpixelAA);
#ifdef MOZ_TREE_CAIRO
  cairo_surface_set_subpixel_antialiasing(mSurface,
    aPermitSubpixelAA ? CAIRO_SUBPIXEL_ANTIALIASING_ENABLED : CAIRO_SUBPIXEL_ANTIALIASING_DISABLED);
#endif
}

void
DrawTargetCairo::FillGlyphs(ScaledFont *aFont,
                            const GlyphBuffer &aBuffer,
                            const Pattern &aPattern,
                            const DrawOptions &aOptions,
                            const GlyphRenderingOptions*)
{
  AutoPrepareForDrawing prep(this, mContext);
  AutoClearDeviceOffset clear(aPattern);

  ScaledFontBase* scaledFont = static_cast<ScaledFontBase*>(aFont);
  cairo_set_scaled_font(mContext, scaledFont->GetCairoScaledFont());

  cairo_pattern_t* pat = GfxPatternToCairoPattern(aPattern, aOptions.mAlpha);
  if (!pat)
    return;

  cairo_set_source(mContext, pat);
  cairo_pattern_destroy(pat);

  cairo_set_antialias(mContext, GfxAntialiasToCairoAntialias(aOptions.mAntialiasMode));

  // Convert our GlyphBuffer into an array of Cairo glyphs.
  std::vector<cairo_glyph_t> glyphs(aBuffer.mNumGlyphs);
  for (uint32_t i = 0; i < aBuffer.mNumGlyphs; ++i) {
    glyphs[i].index = aBuffer.mGlyphs[i].mIndex;
    glyphs[i].x = aBuffer.mGlyphs[i].mPosition.x;
    glyphs[i].y = aBuffer.mGlyphs[i].mPosition.y;
  }

  cairo_show_glyphs(mContext, &glyphs[0], aBuffer.mNumGlyphs);
}

void
DrawTargetCairo::Mask(const Pattern &aSource,
                      const Pattern &aMask,
                      const DrawOptions &aOptions /* = DrawOptions() */)
{
  AutoPrepareForDrawing prep(this, mContext);
  AutoClearDeviceOffset clearSource(aSource);
  AutoClearDeviceOffset clearMask(aMask);

  cairo_set_antialias(mContext, GfxAntialiasToCairoAntialias(aOptions.mAntialiasMode));

  cairo_pattern_t* source = GfxPatternToCairoPattern(aSource, aOptions.mAlpha);
  if (!source) {
    return;
  }

  cairo_pattern_t* mask = GfxPatternToCairoPattern(aMask, aOptions.mAlpha);
  if (!mask) {
    cairo_pattern_destroy(source);
    return;
  }

  if (cairo_pattern_status(source) || cairo_pattern_status(mask)) {
    cairo_pattern_destroy(source);
    cairo_pattern_destroy(mask);
    gfxWarning() << "Invalid pattern";
    return;
  }

  cairo_set_source(mContext, source);
  cairo_mask(mContext, mask);

  cairo_pattern_destroy(mask);
  cairo_pattern_destroy(source);
}

void
DrawTargetCairo::MaskSurface(const Pattern &aSource,
                             SourceSurface *aMask,
                             Point aOffset,
                             const DrawOptions &aOptions)
{
  AutoPrepareForDrawing prep(this, mContext);
  AutoClearDeviceOffset clearSource(aSource);
  AutoClearDeviceOffset clearMask(aMask);

  if (!PatternIsCompatible(aSource)) {
    return;
  }

  cairo_set_antialias(mContext, GfxAntialiasToCairoAntialias(aOptions.mAntialiasMode));

  cairo_pattern_t* pat = GfxPatternToCairoPattern(aSource, aOptions.mAlpha);
  if (!pat) {
    return;
  }

  if (cairo_pattern_status(pat)) {
    cairo_pattern_destroy(pat);
    gfxWarning() << "Invalid pattern";
    return;
  }

  cairo_set_source(mContext, pat);

  if (NeedIntermediateSurface(aSource, aOptions)) {
    cairo_push_group_with_content(mContext, CAIRO_CONTENT_COLOR_ALPHA);

    // Don't want operators to be applied twice
    cairo_set_operator(mContext, CAIRO_OPERATOR_OVER);

    // Now draw the content using the desired operator
    cairo_paint_with_alpha(mContext, aOptions.mAlpha);

    cairo_pop_group_to_source(mContext);
  }

  cairo_surface_t* surf = GetCairoSurfaceForSourceSurface(aMask);
  if (!surf) {
    cairo_pattern_destroy(pat);
    return;
  }
  cairo_pattern_t* mask = cairo_pattern_create_for_surface(surf);
  cairo_matrix_t matrix;

  cairo_matrix_init_translate(&matrix, -aOffset.x, -aOffset.y);
  cairo_pattern_set_matrix(mask, &matrix);

  cairo_set_operator(mContext, GfxOpToCairoOp(aOptions.mCompositionOp));

  cairo_mask(mContext, mask);

  cairo_surface_destroy(surf);
  cairo_pattern_destroy(mask);
  cairo_pattern_destroy(pat);
}

void
DrawTargetCairo::PushClip(const Path *aPath)
{
  if (aPath->GetBackendType() != BackendType::CAIRO) {
    return;
  }

  WillChange(aPath);
  cairo_save(mContext);

  PathCairo* path = const_cast<PathCairo*>(static_cast<const PathCairo*>(aPath));
  path->SetPathOnContext(mContext);
  cairo_clip_preserve(mContext);
}

void
DrawTargetCairo::PushClipRect(const Rect& aRect)
{
  WillChange();
  cairo_save(mContext);

  cairo_new_path(mContext);
  cairo_rectangle(mContext, aRect.X(), aRect.Y(), aRect.Width(), aRect.Height());
  cairo_clip_preserve(mContext);
}

void
DrawTargetCairo::PopClip()
{
  // save/restore does not affect the path, so no need to call WillChange()

  // cairo_restore will restore the transform too and we don't want to do that
  // so we'll save it now and restore it after the cairo_restore
  cairo_matrix_t mat;
  cairo_get_matrix(mContext, &mat);

  cairo_restore(mContext);

  cairo_set_matrix(mContext, &mat);

  MOZ_ASSERT(cairo_status(mContext) || GetTransform() == Matrix(mat.xx, mat.yx, mat.xy, mat.yy, mat.x0, mat.y0),
             "Transforms are out of sync");
}

TemporaryRef<PathBuilder>
DrawTargetCairo::CreatePathBuilder(FillRule aFillRule /* = FillRule::FILL_WINDING */) const
{
  RefPtr<PathBuilderCairo> builder = new PathBuilderCairo(aFillRule);

  return builder;
}

void
DrawTargetCairo::ClearSurfaceForUnboundedSource(const CompositionOp &aOperator)
{
  if (aOperator != CompositionOp::OP_SOURCE)
    return;
  cairo_set_operator(mContext, CAIRO_OPERATOR_CLEAR);
  // It doesn't really matter what the source is here, since Paint
  // isn't bounded by the source and the mask covers the entire clip
  // region.
  cairo_paint(mContext);
}

TemporaryRef<GradientStops>
DrawTargetCairo::CreateGradientStops(GradientStop *aStops, uint32_t aNumStops,
                                     ExtendMode aExtendMode) const
{
  RefPtr<GradientStopsCairo> stops = new GradientStopsCairo(aStops, aNumStops,
                                                            aExtendMode);
  return stops;
}

TemporaryRef<FilterNode>
DrawTargetCairo::CreateFilter(FilterType aType)
{
  return FilterNodeSoftware::Create(aType);
}

/**
 * Copies pixel data from aData into aSurface; aData must have the dimensions
 * given in aSize, with a stride of aStride bytes and aPixelWidth bytes per pixel
 */
static void
CopyDataToCairoSurface(cairo_surface_t* aSurface,
                       unsigned char *aData,
                       const IntSize &aSize,
                       int32_t aStride,
                       int32_t aPixelWidth)
{
  unsigned char* surfData = cairo_image_surface_get_data(aSurface);
  int surfStride = cairo_image_surface_get_stride(aSurface);
  // In certain scenarios, requesting larger than 8k image fails. Bug 803568
  // covers the details of how to run into it, but the full detailed
  // investigation hasn't been done to determine the underlying cause. We
  // will just handle the failure to allocate the surface to avoid a crash.
  if (!surfData) {
    return;
  }
  for (int32_t y = 0; y < aSize.height; ++y) {
    memcpy(surfData + y * surfStride,
           aData + y * aStride,
           aSize.width * aPixelWidth);
  }
  cairo_surface_mark_dirty(aSurface);
}

TemporaryRef<SourceSurface>
DrawTargetCairo::CreateSourceSurfaceFromData(unsigned char *aData,
                                             const IntSize &aSize,
                                             int32_t aStride,
                                             SurfaceFormat aFormat) const
{
  cairo_surface_t* surf = cairo_image_surface_create(GfxFormatToCairoFormat(aFormat),
                                                     aSize.width,
                                                     aSize.height);
  // In certain scenarios, requesting larger than 8k image fails. Bug 803568
  // covers the details of how to run into it, but the full detailed
  // investigation hasn't been done to determine the underlying cause. We
  // will just handle the failure to allocate the surface to avoid a crash.
  if (cairo_surface_status(surf)) {
    return nullptr;
  }

  CopyDataToCairoSurface(surf, aData, aSize, aStride, BytesPerPixel(aFormat));

  RefPtr<SourceSurfaceCairo> source_surf = new SourceSurfaceCairo(surf, aSize, aFormat);
  cairo_surface_destroy(surf);

  return source_surf;
}

TemporaryRef<SourceSurface>
DrawTargetCairo::OptimizeSourceSurface(SourceSurface *aSurface) const
{
  return aSurface;
}

TemporaryRef<SourceSurface>
DrawTargetCairo::CreateSourceSurfaceFromNativeSurface(const NativeSurface &aSurface) const
{
  if (aSurface.mType == NativeSurfaceType::CAIRO_SURFACE) {
    if (aSurface.mSize.width <= 0 ||
        aSurface.mSize.height <= 0) {
      gfxWarning() << "Can't create a SourceSurface without a valid size";
      return nullptr;
    }
    cairo_surface_t* surf = static_cast<cairo_surface_t*>(aSurface.mSurface);
    RefPtr<SourceSurfaceCairo> source =
      new SourceSurfaceCairo(surf, aSurface.mSize, aSurface.mFormat);
    return source;
  }

  return nullptr;
}

TemporaryRef<DrawTarget>
DrawTargetCairo::CreateSimilarDrawTarget(const IntSize &aSize, SurfaceFormat aFormat) const
{
  cairo_surface_t* similar = cairo_surface_create_similar(cairo_get_target(mContext),
                                                          GfxFormatToCairoContent(aFormat),
                                                          aSize.width, aSize.height);

  if (!cairo_surface_status(similar)) {
    RefPtr<DrawTargetCairo> target = new DrawTargetCairo();
    target->InitAlreadyReferenced(similar, aSize);
    return target;
  }

  return nullptr;
}

bool
DrawTargetCairo::InitAlreadyReferenced(cairo_surface_t* aSurface, const IntSize& aSize, SurfaceFormat* aFormat)
{
  mContext = cairo_create(aSurface);
  mSurface = aSurface;
  mSize = aSize;
  mFormat = aFormat ? *aFormat : CairoContentToGfxFormat(cairo_surface_get_content(aSurface));

  if (mFormat == SurfaceFormat::B8G8R8A8 ||
      mFormat == SurfaceFormat::R8G8B8A8) {
    SetPermitSubpixelAA(false);
  } else {
    SetPermitSubpixelAA(true);
  }

  return true;
}

TemporaryRef<DrawTarget>
DrawTargetCairo::CreateShadowDrawTarget(const IntSize &aSize, SurfaceFormat aFormat,
                                        float aSigma) const
{
  cairo_surface_t* similar = cairo_surface_create_similar(cairo_get_target(mContext),
                                                          GfxFormatToCairoContent(aFormat),
                                                          aSize.width, aSize.height);

  if (cairo_surface_status(similar)) {
    return nullptr;
  }

  // If we don't have a blur then we can use the RGBA mask and keep all the
  // operations in graphics memory.
  if (aSigma == 0.0F) {
    RefPtr<DrawTargetCairo> target = new DrawTargetCairo();
    target->InitAlreadyReferenced(similar, aSize);
    return target;
  }

  cairo_surface_t* blursurf = cairo_image_surface_create(CAIRO_FORMAT_A8,
                                                         aSize.width,
                                                         aSize.height);

  if (cairo_surface_status(blursurf)) {
    return nullptr;
  }

  cairo_surface_t* tee = cairo_tee_surface_create(blursurf);
  cairo_surface_destroy(blursurf);
  if (cairo_surface_status(tee)) {
    cairo_surface_destroy(similar);
    return nullptr;
  }
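
  // The tee surface replays every drawing call to each of its member surfaces:
  // index 0 is the A8 blur surface and index 1 is the RGBA "similar" surface,
  // which is how DrawSurfaceWithShadow above picks them apart.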
  cairo_tee_surface_add(tee, similar);
  cairo_surface_destroy(similar);

  RefPtr<DrawTargetCairo> target = new DrawTargetCairo();
  target->InitAlreadyReferenced(tee, aSize);
  return target;
}

bool
DrawTargetCairo::Init(cairo_surface_t* aSurface, const IntSize& aSize, SurfaceFormat* aFormat)
{
  cairo_surface_reference(aSurface);
  return InitAlreadyReferenced(aSurface, aSize, aFormat);
}

bool
DrawTargetCairo::Init(const IntSize& aSize, SurfaceFormat aFormat)
{
  cairo_surface_t *surf = cairo_image_surface_create(GfxFormatToCairoFormat(aFormat), aSize.width, aSize.height);
  return InitAlreadyReferenced(surf, aSize);
}

bool
DrawTargetCairo::Init(unsigned char* aData, const IntSize &aSize, int32_t aStride, SurfaceFormat aFormat)
{
  cairo_surface_t* surf =
    cairo_image_surface_create_for_data(aData,
                                        GfxFormatToCairoFormat(aFormat),
                                        aSize.width,
                                        aSize.height,
                                        aStride);
  return InitAlreadyReferenced(surf, aSize);
}

void *
DrawTargetCairo::GetNativeSurface(NativeSurfaceType aType)
{
  if (aType == NativeSurfaceType::CAIRO_SURFACE) {
    return cairo_get_target(mContext);
  }
  if (aType == NativeSurfaceType::CAIRO_CONTEXT) {
    return mContext;
  }

  return nullptr;
}

void
DrawTargetCairo::MarkSnapshotIndependent()
{
  if (mSnapshot) {
    if (mSnapshot->refCount() > 1) {
      // We only need to worry about snapshots that someone else knows about
      mSnapshot->DrawTargetWillChange();
    }
    mSnapshot = nullptr;
  }
}

void
DrawTargetCairo::WillChange(const Path* aPath /* = nullptr */)
{
  MarkSnapshotIndependent();
  MOZ_ASSERT(!mLockedBits);
}

void
DrawTargetCairo::SetTransform(const Matrix& aTransform)
{
  mTransform = aTransform;

  cairo_matrix_t mat;
  GfxMatrixToCairoMatrix(mTransform, mat);
  cairo_set_matrix(mContext, &mat);
}

Rect
DrawTargetCairo::GetUserSpaceClip()
{
  double clipX1, clipY1, clipX2, clipY2;
  cairo_clip_extents(mContext, &clipX1, &clipY1, &clipX2, &clipY2);
  return Rect(clipX1, clipY1, clipX2 - clipX1, clipY2 - clipY1); // Narrowing of doubles to floats
}

cairo_t*
BorrowedCairoContext::BorrowCairoContextFromDrawTarget(DrawTarget* aDT)
{
  if (aDT->GetType() != BackendType::CAIRO || aDT->IsDualDrawTarget()) {
    return nullptr;
  }
  DrawTargetCairo* cairoDT = static_cast<DrawTargetCairo*>(aDT);

  cairoDT->WillChange();

  // save the state to make it easier for callers to avoid mucking with things
  cairo_save(cairoDT->mContext);

  // Neuter the DrawTarget while the context is being borrowed
  cairo_t* cairo = cairoDT->mContext;
  cairoDT->mContext = nullptr;

  return cairo;
}
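
// Illustrative borrow/return pairing (hedged; "dt" is a hypothetical
// caller-owned DrawTarget* backed by the cairo backend):
//
//   cairo_t* ctx = BorrowedCairoContext::BorrowCairoContextFromDrawTarget(dt);
//   if (ctx) {
//     // ... draw directly with cairo while dt's context is neutered ...
//     BorrowedCairoContext::ReturnCairoContextToDrawTarget(dt, ctx);
//   }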

void
BorrowedCairoContext::ReturnCairoContextToDrawTarget(DrawTarget* aDT,
                                                     cairo_t* aCairo)
{
  if (aDT->GetType() != BackendType::CAIRO || aDT->IsDualDrawTarget()) {
    return;
  }
  DrawTargetCairo* cairoDT = static_cast<DrawTargetCairo*>(aDT);

  cairo_restore(aCairo);
  cairoDT->mContext = aCairo;
}

}
}