gralloc: Use GRALLOC_USAGE_PROTECTED for L1 protection

Do not require the MM_HEAP flag for TZ protection. This is in
line with the gralloc spec.

Bug: 27536318

Change-Id: I0bc7346b0a40061600707d8c1881a9d405995b4f
This commit is contained in:
Naseer Ahmed 2016-04-11 16:50:01 -04:00 committed by Steve Pfetsch
parent 452f4e8628
commit 4f6feb11ed
6 changed files with 29 additions and 52 deletions

View File

@@ -78,6 +78,8 @@ static bool canFallback(int usage, bool triedSystem)
 static bool useUncached(int usage)
 {
+    if (usage & GRALLOC_USAGE_PROTECTED)
+        return true;
     if (usage & GRALLOC_USAGE_PRIVATE_UNCACHED)
         return true;
     if(((usage & GRALLOC_USAGE_SW_WRITE_MASK) == GRALLOC_USAGE_SW_WRITE_RARELY)
@@ -298,14 +300,8 @@ int IonController::allocate(alloc_data& data, int usage)
         ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);

     if(usage & GRALLOC_USAGE_PROTECTED) {
-        if (usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
-            ionFlags |= ION_HEAP(ION_CP_MM_HEAP_ID);
-            ionFlags |= ION_SECURE;
-        } else {
-            // for targets/OEMs which do not need HW level protection
-            // do not set ion secure flag & MM heap. Fallback to IOMMU heap.
-            ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
-        }
+        ionFlags |= ION_HEAP(ION_CP_MM_HEAP_ID);
+        ionFlags |= ION_SECURE;
     } else if(usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
         //MM Heap is exclusively a secure heap.
         //If it is used for non secure cases, fallback to IOMMU heap

View File

@@ -70,8 +70,7 @@ int gpu_context_t::gralloc_alloc_buffer(size_t size, int usage,
     /* force 1MB alignment selectively for secure buffers, MDP5 onwards */
 #ifdef MDSS_TARGET
-    if ((usage & GRALLOC_USAGE_PROTECTED) &&
-            (usage & GRALLOC_USAGE_PRIVATE_MM_HEAP)) {
+    if (usage & GRALLOC_USAGE_PROTECTED) {
         data.align = ALIGN((int) data.align, SZ_2M);
         size = ALIGN(size, data.align);
     }

View File

@@ -84,11 +84,10 @@ static bool canFallback(int usage, bool triedSystem)
  * read or written in software. Any combination with a _RARELY_ flag will be
  * treated as uncached. */
 static bool useUncached(const int& usage) {
-    if((usage & GRALLOC_USAGE_PRIVATE_UNCACHED) or
-       ((usage & GRALLOC_USAGE_SW_WRITE_MASK) ==
-        GRALLOC_USAGE_SW_WRITE_RARELY) or
-       ((usage & GRALLOC_USAGE_SW_READ_MASK) ==
-        GRALLOC_USAGE_SW_READ_RARELY))
+    if ((usage & GRALLOC_USAGE_PROTECTED) or
+        (usage & GRALLOC_USAGE_PRIVATE_UNCACHED) or
+        ((usage & GRALLOC_USAGE_SW_WRITE_MASK) == GRALLOC_USAGE_SW_WRITE_RARELY) or
+        ((usage & GRALLOC_USAGE_SW_READ_MASK) == GRALLOC_USAGE_SW_READ_RARELY))
         return true;
     return false;
@@ -383,20 +382,13 @@ int IonController::allocate(alloc_data& data, int usage)
         ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);

     if(usage & GRALLOC_USAGE_PROTECTED) {
-        if (usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
-            ionFlags |= ION_HEAP(ION_CP_MM_HEAP_ID);
-            ionFlags |= ION_SECURE;
+        ionFlags |= ION_HEAP(ION_CP_MM_HEAP_ID);
+        ionFlags |= ION_SECURE;
 #ifdef ION_FLAG_ALLOW_NON_CONTIG
-            if (!(usage & GRALLOC_USAGE_PRIVATE_SECURE_DISPLAY)) {
-                ionFlags |= ION_FLAG_ALLOW_NON_CONTIG;
-            }
-#endif
-        } else {
-            // for targets/OEMs which do not need HW level protection
-            // do not set ion secure flag & MM heap. Fallback to IOMMU heap.
-            ionFlags |= ION_HEAP(ION_IOMMU_HEAP_ID);
-            data.allocType |= private_handle_t::PRIV_FLAGS_PROTECTED_BUFFER;
+        if (!(usage & GRALLOC_USAGE_PRIVATE_SECURE_DISPLAY)) {
+            ionFlags |= ION_FLAG_ALLOW_NON_CONTIG;
         }
+#endif
     } else if(usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
         //MM Heap is exclusively a secure heap.
         //If it is used for non secure cases, fallback to IOMMU heap

View File

@@ -66,8 +66,7 @@ int gpu_context_t::gralloc_alloc_buffer(unsigned int size, int usage,
     /* force 1MB alignment selectively for secure buffers, MDP5 onwards */
 #ifdef MDSS_TARGET
-    if ((usage & GRALLOC_USAGE_PROTECTED) &&
-            (usage & GRALLOC_USAGE_PRIVATE_MM_HEAP)) {
+    if (usage & GRALLOC_USAGE_PROTECTED) {
         data.align = ALIGN((int) data.align, SZ_1M);
         size = ALIGN(size, data.align);
     }

View File

@@ -87,11 +87,10 @@ static unsigned int getUBwcSize(int, int, int, const int, const int);
  * read or written in software. Any combination with a _RARELY_ flag will be
  * treated as uncached. */
 static bool useUncached(const int& usage) {
-    if((usage & GRALLOC_USAGE_PRIVATE_UNCACHED) or
-       ((usage & GRALLOC_USAGE_SW_WRITE_MASK) ==
-        GRALLOC_USAGE_SW_WRITE_RARELY) or
-       ((usage & GRALLOC_USAGE_SW_READ_MASK) ==
-        GRALLOC_USAGE_SW_READ_RARELY))
+    if ((usage & GRALLOC_USAGE_PROTECTED) or
+        (usage & GRALLOC_USAGE_PRIVATE_UNCACHED) or
+        ((usage & GRALLOC_USAGE_SW_WRITE_MASK) == GRALLOC_USAGE_SW_WRITE_RARELY) or
+        ((usage & GRALLOC_USAGE_SW_READ_MASK) == GRALLOC_USAGE_SW_READ_RARELY))
         return true;
     return false;
@@ -447,23 +446,16 @@ int IonController::allocate(alloc_data& data, int usage)
     data.allocType = 0;
     if(usage & GRALLOC_USAGE_PROTECTED) {
-        if (usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
-            if (usage & GRALLOC_USAGE_PRIVATE_SECURE_DISPLAY) {
-                ionHeapId = ION_HEAP(SD_HEAP_ID);
-                /*
-                 * There is currently no flag in ION for Secure Display
-                 * VM. Please add it to the define once available.
-                 */
-                ionFlags |= ION_SD_FLAGS;
-            } else {
-                ionHeapId = ION_HEAP(CP_HEAP_ID);
-                ionFlags |= ION_CP_FLAGS;
-            }
+        if (usage & GRALLOC_USAGE_PRIVATE_SECURE_DISPLAY) {
+            ionHeapId = ION_HEAP(SD_HEAP_ID);
+            /*
+             * There is currently no flag in ION for Secure Display
+             * VM. Please add it to the define once available.
+             */
+            ionFlags |= ION_SD_FLAGS;
         } else {
-            // for targets/OEMs which do not need HW level protection
-            // do not set ion secure flag & MM heap. Fallback to system heap.
-            ionHeapId |= ION_HEAP(ION_SYSTEM_HEAP_ID);
-            data.allocType |= private_handle_t::PRIV_FLAGS_PROTECTED_BUFFER;
+            ionHeapId = ION_HEAP(CP_HEAP_ID);
+            ionFlags |= ION_CP_FLAGS;
         }
     } else if(usage & GRALLOC_USAGE_PRIVATE_MM_HEAP) {
         //MM Heap is exclusively a secure heap.

View File

@@ -63,8 +63,7 @@ int gpu_context_t::gralloc_alloc_buffer(unsigned int size, int usage,
     else
         data.align = getpagesize();
-    if ((usage & GRALLOC_USAGE_PROTECTED) &&
-            (usage & GRALLOC_USAGE_PRIVATE_MM_HEAP)) {
+    if (usage & GRALLOC_USAGE_PROTECTED) {
         if (usage & GRALLOC_USAGE_PRIVATE_SECURE_DISPLAY) {
             /* The alignment here reflects qsee mmu V7L/V8L requirement */
             data.align = SZ_2M;